diff --git a/.gitignore b/.gitignore
index 64f216752296e4eb029c46d4b6b9dd802d3bdc48..39dcc8a554d1be3f8a4216dab35124b50d98b21b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -136,3 +136,12 @@ dmypy.json
 
 # Cython debug symbols
 cython_debug/
+
+# large SCMARK dataset files
+*.h5ad
+
+## nohup output
+nohup.out
+
+## vscode config
+.vscode/
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fe4c821724306916c3ae169f64eb9a1a89b18017..68843360f1f8ce5475e7c5d07c605dc7fdfb3168 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,10 +1,27 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.2.0
+    rev: v4.4.0
     hooks:
     -   id: trailing-whitespace
     -   id: end-of-file-fixer
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.3.0
     hooks:
       - id: black
+  # - repo: local
+  #   hooks:
+  #     - id: pylint
+  #       name: pylint
+  #       entry: pylint
+  #       language: system
+  #       types: [python]
+  #       args:
+  #         [
+  #           "-rn",
+  #           "-sn",
+  #           "--load-plugins=pylint.extensions.docparams",
+  #           "--accept-no-yields-doc=no",
+  #           "--accept-no-return-doc=no",
+  #           "--accept-no-raise-doc=no",
+  #           "--accept-no-param-doc=no",
+  #         ]
diff --git a/README.md b/README.md
index 5f2103da2a75c8872e3a2013f7c129a76cff0df6..266c4fe18ea1986a16d53ff1ddacba486b83f8e9 100644
--- a/README.md
+++ b/README.md
@@ -2,26 +2,26 @@
 
 ### Description of the package
 
-The package implements 3 differents classes that fits a PLN-PCA model (described in the mathematical description above). Basically, it tries to find the correlation between features and the effect of covariables on these features. As main characteristic, this model takes into account count data. 
-- The fastPLN class fits a PLN model (a special PLN-PCA model) using variational approximation. 
-- The fastPLNPCA class fits a PLN-PCA  using variational approximation. 
-- The IMPS_PLN fits a PLN-PCA model using Importance sampling. 
+The package implements 3 different classes that fit a PLN-PCA model (described in the mathematical section below). Basically, it tries to find the correlations between features and the effect of covariates on these features. Its main characteristic is that the model takes count data into account.
+- The fastPLN class fits a PLN model (a special case of the PLN-PCA model) using a variational approximation.
+- The fastPLNPCA class fits a PLN-PCA model using a variational approximation.
+- The IMPS_PLN class fits a PLN-PCA model using importance sampling.
 
-IMPS_PLN is always slower than fastPLN. fastPLNPCA is faster than fastPLN only for datasets with very large number of genes (p>5000, see [here](https://github.com/PLN-team/PLNpy/blob/master/images/Comparison_fastPLN_vs_fastPLNPCA_n%3D1000.png)). However, 
-fastPLNPCA is convenient since it allows to get the Principal Components (PCs) directly, in contrary to fastPLN. To get the PCs using fastPLN, you first need to fit the model and do a PCA on the matrix $\Sigma$  found. The numerical complexity is always linear with respect to the number of samples n (see [here](https://github.com/PLN-team/PLNpy/blob/master/images/Comparison_fastPLN_vs_fastPLNPCA_p%3D1000.png)) 
+IMPS_PLN is always slower than fastPLN. fastPLNPCA is faster than fastPLN only for datasets with a very large number of genes (p>5000, see [here](https://github.com/PLN-team/PLNpy/blob/master/images/Comparison_fastPLN_vs_fastPLNPCA_n%3D1000.png)). However,
+fastPLNPCA is convenient since it gives the Principal Components (PCs) directly, contrary to fastPLN. To get the PCs using fastPLN, you first need to fit the model and then perform a PCA on the estimated matrix $\Sigma$. The numerical complexity is always linear with respect to the number of samples n_samples (see [here](https://github.com/PLN-team/PLNpy/blob/master/images/Comparison_fastPLN_vs_fastPLNPCA_p%3D1000.png)).
 
 
-All of these class are aggregated into the class PLNmodel, so that you don't need to deal with multiple classes. This class will automatically fit the data with one of those classes.  
+All of these classes are aggregated into the PLNmodel class, so that you don't need to deal with multiple classes. PLNmodel will automatically fit the data with one of them.
 
-### How to use the package? 
+### How to use the package?
 
-First, you need to pip install the package. We recommend to create a new environment before installing the package.  
+First, you need to pip install the package. We recommend creating a new environment before installing it.
 
 ```
 pip install pyPLNmodels
 ```
 
-The package comes with an artificial dataset to present the functionality. You can load it doing the following: 
+The package comes with an artificial dataset to demonstrate the functionality. You can load it as follows:
 
 ```
 import pandas as pd
@@ -30,11 +30,11 @@ O = pd.read_csv('example_data/O_test')
 cov = pd.read_csv('example_data/cov_test')
 ```
 
-If you want $q$ Principal Composants, you only need to call: 
+If you want $q$ Principal Components, you only need to call:
 
 ```
 from pyPLNmodels.models import PLNmodel
-nbpcs = 5 # number of principal components 
+nbpcs = 5 # number of principal components
 mypln = PLNmodel(q= nbpcs)
 mypln.fit(Y,O,cov)
 print(mypln)
@@ -43,7 +43,7 @@ print(mypln)
 Note that if you do not specify $q$, it will take the maximum possible value. You can look for a better approximation by setting ```fast = False ``` in the ```.fit()``` method, but it will take much more time:
 
 ```
-nbpcs = 5 
+nbpcs = 5
 mypln = PLNmodel(nbpcs)
 mypln.fit(Y,O,cov, fast = False)
 print(mypln)
@@ -51,9 +51,9 @@ print(mypln)
 
 
 
-###### How to get the model parameters back ? 
+###### How to get the model parameters back?
 
-You can get the model parameters back running: 
+You can get the model parameters back by running:
 
 ```
 beta = mypln.get_beta()
@@ -64,14 +64,14 @@ Sigma = mypln.get_Sigma()
 
 
 
-<strong>This class automatically picks the right model among ```fastPLN, fastPLNPCA``` and  ```IMPS_PLN```. If you want to know more about each of these algorithms that are quite different, you can check below.</strong> 
+<strong>This class automatically picks the right model among ```fastPLN```, ```fastPLNPCA``` and ```IMPS_PLN```. If you want to know more about these algorithms, which are quite different, see below.</strong>
 
-### How to fit each model? 
+### How to fit each model?
 
 #### Fit the PLN model
 
- 
-You have to call : 
+
+You have to call:
 
 ```
 from pyPLNmodels.models import fastPLN
@@ -85,18 +85,18 @@ print(fast)
 ##### Hyperparameters
 
 Here are the main hyperparameters of the ```.fit()``` method of the ```fastPLN``` object:
-- ```N_iter_max```: The maximum number of iteration you are ready to do. If the algorithm has not converged, you can try to increase it. Default is 200. 
+- ```N_iter_max```: The maximum number of iterations you are willing to do. If the algorithm has not converged, you can try to increase it. Default is 200.
 - ```tol```: tolerance of the model. The algorithm will stop if the ELBO (approximated likelihood of the model) has not increased by more than ```tol```. Try to decrease it if the algorithm has not converged. Default is ```1e-1```.
-- ```good_init```: If set to ```True```, the algorithm will do an initialization that can take some time, especially for large datasets. You can set it to ```False``` if you want a much faster but random initialization. Default is ```True```. 
+- ```good_init```: If set to ```True```, the algorithm will do an initialization that can take some time, especially for large datasets. You can set it to ```False``` if you want a much faster but random initialization. Default is ```True```.
 
-Those 3 parameters are important. However, they won't change the asymptotic behavior of the algorithm. If you launch the algorithm for a sufficient time (i.e. ```tol``` is small enough and ```N_iter_max``` is big enough), it will converge to the right parameters independently of the hyperparameters. Moreover, the default arguments are convenient for most datasets. 
-If you want to see the progress of the algorithm in real time, you can set ```Verbose = True``` in the .```fit()``` method. 
+Those 3 parameters are important. However, they won't change the asymptotic behavior of the algorithm. If you run the algorithm for long enough (i.e. ```tol``` is small enough and ```N_iter_max``` is large enough), it will converge to the right parameters independently of the hyperparameters. Moreover, the default arguments are convenient for most datasets.
+If you want to see the progress of the algorithm in real time, you can set ```Verbose = True``` in the ```.fit()``` method.
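+
+For instance, making all of them explicit (keyword names as documented above, values set to their defaults; the exact signature may differ):
+
+```
+fast.fit(Y, O, cov, N_iter_max=200, tol=1e-1, good_init=True, Verbose=True)
+```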
 
 ##### How to be sure the algorithm has converged?
 
-Basically, if the ELBO reaches a plateau, the algorithm has converged. If it has not reached a plateau, then you can try to increase the number of iteration ```N_iter_max``` or lower the tolerance ```tol```. 
+Basically, if the ELBO reaches a plateau, the algorithm has converged. If it has not reached a plateau, you can try to increase the number of iterations ```N_iter_max``` or lower the tolerance ```tol```.
 
-Note that you don't need to restart the algorithm from the beginning, you can start from where the algorithm has stopped by calling: 
+Note that you don't need to restart the algorithm from the beginning; you can resume from where it stopped by calling:
 
 ```
 fast.fit(Y,O,cov, N_iter_max = 500, tol = 1e-5)
@@ -118,18 +118,18 @@ print(fastpca)
 
 ![](images/fastPLNPCA_screenshot.png)
 
-The hyperparameters of the ```.fit()``` method are the same as for the ```fastPLN``` object. Only the Default values  of ```N_iter_max ``` and ```tol``` are differents: 
+The hyperparameters of the ```.fit()``` method are the same as for the ```fastPLN``` object. Only the default values of ```N_iter_max``` and ```tol``` are different:
 
 - ```N_iter_max``` default is: 5000
 - ```tol``` default is: 1e-3
 
-You can check if the algorithm has converged following the same guidelines as for ```fastPLN```. 
+You can check if the algorithm has converged by following the same guidelines as for ```fastPLN```.
 The numerical complexity is linear with respect to the number of genes p.
 
 ### Fit the IMPS_PLN model
 
 
-To fit the IMPS based model, you need to declare the number of Principal composents, and then you can fit the model:  
+To fit the IMPS-based model, you need to declare the number of Principal Components, and then you can fit the model:
 ```
 from pyPLNmodels.models import IMPS_PLN
 nbpcs = 5
@@ -145,49 +145,49 @@ print(imps)
 
 ##### Hyperparameters
 
-The hyperparameters of the ```.fit()``` method of the ```IMPS_PLN``` are more complicated and technical. We suggest to take a look at the mathematical description of the package to gain intuition. Basically, the ```IMPS_PLN``` estimates the gradients of the log likelihood with importance sampling. Here are the main  hyperparameters and their impacts: 
-- ```acc```: the accuracy of the approximation. The lower the better the gradient approximation, but the lower the algorithm. Default is 0.005 You can try to increasing it if you want to be faster. However reducing it won't gain much accuracy, and will significantly increase the convergence time. 
-- ``` N_epoch_max```: The maximum number of iteration you are ready to do. If the tolerance has not converged, you can try to increase it. Default is 500. 
-- ```lr```: Learning rate of the gradient ascent. You can try to reduce it or lower it, and see if the final likelihood has improved. Default is 0.1. 
-- ```batch_size```: The batch size of the gradient descent. The larger the more accurate the gradients, but the slower the algorithm. if you have very large datasets, you can try to increase it. If you decrease it, then you hsould also decrease the learning rate. Default is 40. Should not exceed the number of samples you have in your dataset. 
-- ```optimizer```: The optimizer you take for the gradient ascent. You can try ```torch.optim.RMSprop```, which is more robust to inappropriate learning rates. However lower the learning rate to 0.01 if using ```torch.optim.RMSprop```. Default is ```torch.optim.Adagrad```. 
-- ```nb_plateau```: The algorithm will stop if the likelihood of the model has not increased during ```nb_plateau``` epochs. Default is 15. 
-- ```nb_trigger```: Since the likelihood is approximated and random, we consider that the likelihood does not increase if during ```nb_trigger``` iterations it has not improved from the maximum likelihood computed. This parameter is here to deal with the randomness of the criterion.   Default is 5. 
-- ```good_init```: If set to ```True```, the algorithm will do a precise initialization (that takes some time). You can remove this step by setting ```good_init = False ```. Default is True. 
-
-You can see the progress of the algorithm in real time by setting ```verbose = True``` in the ```.fit()``` method. 
+The hyperparameters of the ```.fit()``` method of the ```IMPS_PLN``` are more complicated and technical. We suggest taking a look at the mathematical description of the package to gain intuition. Basically, the ```IMPS_PLN``` estimates the gradients of the log likelihood with importance sampling. Here are the main hyperparameters and their impact:
+- ```acc```: the accuracy of the approximation. The lower it is, the better the gradient approximation, but the slower the algorithm. Default is 0.005. You can try increasing it if you want a faster fit. However, reducing it won't gain much accuracy and will significantly increase the convergence time.
+- ```N_epoch_max```: The maximum number of epochs you are willing to do. If the algorithm has not converged, you can try to increase it. Default is 500.
+- ```lr```: Learning rate of the gradient ascent. You can try to raise or lower it, and see if the final likelihood improves. Default is 0.1.
+- ```batch_size```: The batch size of the gradient ascent. The larger it is, the more accurate the gradients, but the slower the algorithm. If you have very large datasets, you can try to increase it. If you decrease it, then you should also decrease the learning rate. Default is 40. It should not exceed the number of samples in your dataset.
+- ```optimizer```: The optimizer used for the gradient ascent. You can try ```torch.optim.RMSprop```, which is more robust to inappropriate learning rates; however, lower the learning rate to 0.01 if using ```torch.optim.RMSprop```. Default is ```torch.optim.Adagrad```.
+- ```nb_plateau```: The algorithm will stop if the likelihood of the model has not increased for ```nb_plateau``` epochs. Default is 15.
+- ```nb_trigger```: Since the likelihood is approximated and random, we consider that the likelihood has stopped increasing only if it has not improved on the maximum likelihood computed so far for ```nb_trigger``` iterations. This parameter deals with the randomness of the criterion. Default is 5.
+- ```good_init```: If set to ```True```, the algorithm will do a precise initialization (which takes some time). You can skip this step by setting ```good_init = False```. Default is ```True```.
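+
+For instance, a sketch putting the documented defaults together (hedged: keyword names are taken from the list above, and the exact signature may differ):
+
+```
+import torch
+from pyPLNmodels.models import IMPS_PLN
+
+imps = IMPS_PLN(nbpcs)
+imps.fit(Y, O, cov, acc=0.005, N_epoch_max=500, lr=0.1, batch_size=40,
+         optimizer=torch.optim.Adagrad, nb_plateau=15, nb_trigger=5,
+         good_init=True, verbose=True)
+```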
+
+You can see the progress of the algorithm in real time by setting ```verbose = True``` in the ```.fit()``` method.
 The numerical complexity is linear with respect to the number of genes p.
 
-##### How to be sure the algorithm has converged ? 
+##### How to be sure the algorithm has converged?
 
-Unfortunately, there is no heuristics to know if the algorithm has converged. Indeed, even if you reach a plateau, it is possible that you can reach a much better plateau with other hyperparameters. However, this is in fact due to the choice of ```torch.optim.Adagrad``` as optimizer. If it has converged, it will be a very good solution. To have a (fast) convergence, you need to take the learning rate in the right interval. Fortunately, it is quite large: about ```[0.01, 0.3]``` for many cases. 
+Unfortunately, there is no heuristic to know whether the algorithm has converged. Indeed, even if you reach a plateau, you may be able to reach a much better plateau with other hyperparameters. This is in fact due to the choice of ```torch.optim.Adagrad``` as optimizer. If the algorithm has converged, the solution will be a very good one. To get (fast) convergence, you need to take the learning rate in the right interval. Fortunately, this interval is quite large: about ```[0.01, 0.3]``` in many cases.
 
-If you have still not converged, you can try to change the optimizer to ```torch.optim.RMSprop```, but lower the learning to 0.02 or lower. You can also increase the batch_size and the number of iteration you do. If your dataset is not too big, as a last resort, you can try to set the learning rate to 0.1, taking as optimizer ```torch.optim.Rprop``` and set the ```batch_size``` to the number of samples you have in your dataset. 
+If it has still not converged, you can try changing the optimizer to ```torch.optim.RMSprop```, while lowering the learning rate to 0.02 or less. You can also increase the ```batch_size``` and the number of iterations. If your dataset is not too big, as a last resort, you can set the learning rate to 0.1, take ```torch.optim.Rprop``` as optimizer, and set the ```batch_size``` to the number of samples in your dataset.
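+
+In code, that last-resort configuration might look like this (hedged sketch; ```Y``` is the count matrix loaded earlier):
+
+```
+import torch
+imps.fit(Y, O, cov, lr=0.1, optimizer=torch.optim.Rprop, batch_size=Y.shape[0])
+```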
 
 
-### How to retrieve the parameters of the model ? 
+### How to retrieve the parameters of the model?
 
 After fitting the model, one can retrieve the parameters of the model. To retrieve $\beta$, you only need to call:
 
 ```beta_chap = model.get_beta() ```
 
-To retrieve $\Sigma$, you only need to call: 
+To retrieve $\Sigma$, you only need to call:
 
 ```Sigma_chap = model.get_Sigma()```
 
-Note that for the PCA models, this matrix won't be invertible. 
+Note that for the PCA models, this matrix won't be invertible.
 
-To retrieve $C$, you only need to call: 
+To retrieve $C$, you only need to call:
 
 ```C_chap = model.get_C()```
 
-For the fastPLN object, you will get a Matrix of size $(p,p)$ containing the eigenvectors of $\Sigma$ numberred progressively from the eigenvectors with largest eigenvalue to the lowest. 
+For the fastPLN object, you will get a matrix of size $(p,p)$ containing the eigenvectors of $\Sigma$, ordered from the eigenvector with the largest eigenvalue to the one with the smallest.
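+
+For instance, a minimal sketch of the PCA-on-$\Sigma$ route mentioned at the top of this README (assuming ```get_Sigma()``` returns something convertible to a NumPy array; variable names are illustrative):
+
+```
+import numpy as np
+
+Sigma_chap = np.asarray(mypln.get_Sigma())
+eigenvalues, eigenvectors = np.linalg.eigh(Sigma_chap)
+# eigh returns eigenvalues in ascending order; flip to put the largest first
+pcs = eigenvectors[:, ::-1][:, :nbpcs]
+```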
 
 
 
-## Quick mathematical description of the package. 
+## Quick mathematical description of the package
 
-The package tries to infer the parameters of two models: 
+The package tries to infer the parameters of two models:
 
 - Poisson Log Normal-Principal Component Analysis model (PLN-PCA)
 - Poisson Log Normal model (PLN) (special case of PLN-PCA model)
@@ -195,7 +195,7 @@ The package tries to infer the parameters of two models:
 
 
 
-We consider the following model PLN-PCA model:  
+We consider the following PLN-PCA model:
 
 - Consider $n$ samples $(i=1 \ldots n)$
 
@@ -204,38 +204,38 @@ $x_{i h}=$ (covariate) for sample $i$ (altitude, temperature, categorical covari
 
 - Consider $p$ features (genes) $(j=1 \ldots p)$ and the measurements $Y=\left(Y_{i j}\right)_{1 \leq i \leq n, 1 \leq j \leq p}$:
 
-- Measure $Y = Y_{i j}=$ number of times the feature $j$ is observed in sample $i$. 
+- Measure $Y_{i j}=$ the number of times feature $j$ is observed in sample $i$.
 
 - Associate a random vector $Z_{i}$ with each sample.
 - Assume that the unknown $\left(W_{i}\right)_{1 \leq i \leq n}$ are independent and live in a space of dimension $q\leq p$ such that:
 
 $$
-\begin{aligned} 
+\begin{aligned}
 W_{i} & \sim \mathcal{N}_q\left(0, I_{q}\right)  \\
 Z_{i} &=\beta^{\top}\mathbf{x}_{i} +\mathbf{C}W_i  \in \mathbb R^p \\
 Y_{i j} \mid Z_{i j} & \sim \mathcal{P}\left(\exp \left(o_{ij} + Z_{i j}\right)\right)
 \end{aligned}
 $$
 
-and $C\in \mathbb R^{p\times q}$, $\beta \in \mathbb R^{d\times p}$. 
+with $C\in \mathbb R^{p\times q}$ and $\beta \in \mathbb R^{d\times p}$.
 
-Where $O = (o_{ij})_{1\leq i\leq n, 1\leq j\leq p}$ are known offsets. 
+Here $O = (o_{ij})_{1\leq i\leq n, 1\leq j\leq p}$ are known offsets.
 
-We can see that 
+Setting $\Sigma = CC^{\top}$, we can see that
 
 $$Z_{i} \sim \mathcal N_p (\beta^{\top}\mathbf{x}_{i}, \Sigma) $$
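+
+Indeed, $Z_i$ is an affine transform of the Gaussian $W_i$, so it is Gaussian with
+
+$$\mathbb E[Z_i] = \beta^{\top}\mathbf{x}_{i}, \qquad \mathbb V[Z_i] = C\,\mathbb V[W_i]\,C^{\top} = CC^{\top} = \Sigma.$$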
 
-The unknown parameter is $\theta = (\Sigma,\beta)$. The latent variable of the model can be seen as $Z$ or $W$. 
+The unknown parameter is $\theta = (\Sigma,\beta)$. The latent variable of the model can be seen as $Z$ or $W$.
 
 
-- When $p=q$, we call this model Poisson-Log Normal (PLN) model. In this case, $Z_i$ is a non-degenerate gaussian with mean  $\beta^{\top}\mathbf{x}_{i} \in \mathbb R^p$ and covariance matrix $\Sigma$.  
+- When $p=q$, we call this model the Poisson-Log Normal (PLN) model. In this case, $Z_i$ is a non-degenerate Gaussian with mean $\beta^{\top}\mathbf{x}_{i} \in \mathbb R^p$ and covariance matrix $\Sigma$.
 - When $q<p$, we call this model Poisson-Log Normal-Principal Component Analysis (PLN-PCA). Indeed, we are doing a PCA in the latent layer, estimating $\Sigma$ with a rank $q$ matrix: $CC^{\top}$.
 
 The goal of this package is to retrieve $\theta$ from the observed data $(Y, O, X)$. To do so, we will try to maximize the log likelihood of the model:
 $$p_{\theta}(Y_i)  = \int_{\mathbb R^q} p_{\theta}(Y_i,W)dW \overset{\text{ (if } p=q\text{)}}{=} \int_{\mathbb R^p} p_{\theta}(Y_i,Z)dZ$$
 
-However, almost any integrals involving the law of the complete data is unreachable, so that we can't perform neither gradient ascent algorithms nor EM algorithm.   
-We adopt two different approaches to circumvent this problem: 
+However, almost any integral involving the law of the complete data is intractable, so we can perform neither gradient ascent algorithms nor the EM algorithm.
+We adopt two different approaches to circumvent this problem:
 - Variational approximation of the latent layer (Variational EM)
 - Importance sampling based algorithm, using a gradient ascent method.
 
@@ -248,15 +248,15 @@ We adopt two different approaches to circumvent this problem:
 
 ## Variational approach
 
-We want here to use the EM algorithm, but the E step is unreachable, since the law $Z|Y_i$ (resp $W|Y_i$) is unknown and can't be integrated out. We thus choose to approximate the law of $Z|Y_i$ (resp $W|Y_i$) with a law $\phi_i(Z)$ (resp $\phi_i(W)$), where $\phi_i$ is taken among a family of law. We thus change the objective function: 
+We would like to use the EM algorithm here, but the E step is intractable, since the law of $Z|Y_i$ (resp. $W|Y_i$) is unknown and can't be integrated out. We thus choose to approximate the law of $Z|Y_i$ (resp. $W|Y_i$) with a law $\phi_i(Z)$ (resp. $\phi_i(W)$), where $\phi_i$ is taken in a family of laws. We thus change the objective function:
 
-$$\begin{align} J_Y(\theta,\phi) & = \frac 1 n \sum _{i = 1}^n J_{Y_i}(\theta, \phi_i) \\ 
-J_{Y_i}(\theta, \phi_i)& =\log p_{\theta}(Y_i)-K L\left[\phi_i(Z_i) \|p_{\theta}(Z_i \mid Y_i)\right]\\ 
+$$\begin{align} J_Y(\theta,\phi) & = \frac 1 n \sum _{i = 1}^n J_{Y_i}(\theta, \phi_i) \\
+J_{Y_i}(\theta, \phi_i)& =\log p_{\theta}(Y_i)-KL\left[\phi_i(Z_i) \|p_{\theta}(Z_i \mid Y_i)\right]\\
 & = \mathbb{E}_{\phi_i}\left[\log p_{\theta}(Y_i, Z_i)\right] \underbrace{-\mathbb{E}_{\phi_i}[\log \phi_i(Z_i)]}_{\text {entropy } \mathcal{H}(\phi_i)} \\
 \end{align}$$
 
 
-We choose $\phi_i$ in a family distribution : 
+We choose $\phi_i$ in a family of distributions:
 
 $$
 \phi_i \in \mathcal{Q}_{\text {diag}}=\{
@@ -264,13 +264,13 @@ $$
 , M_i \in \mathbb{M} ^q, S_i \in \mathbb{R} ^q\right\}
 $$
 
-We choose such a Gaussian approximation since $W$ is gaussian, so that $W|Y_i$ may be well approximated. However, taking a diagonal matrix as covariance breaks the dependecy induced by $Y_i$. 
+We choose such a Gaussian approximation since $W$ is Gaussian, so that $W|Y_i$ may be well approximated. However, taking a diagonal matrix as covariance breaks the dependency induced by $Y_i$.
 
-We can prove that $J_{Y_i}(\theta, \phi_i) \leq p_{\theta} (Y_i) \; \forall \phi_i$. The quantity $J_{Y}(\theta, \phi)$ is called the ELBO (Evidence Lower BOund).  
+We can prove that $J_{Y_i}(\theta, \phi_i) \leq \log p_{\theta} (Y_i) \; \forall \phi_i$. The quantity $J_{Y}(\theta, \phi)$ is called the ELBO (Evidence Lower BOund).
 
-#### Variational EM 
+#### Variational EM
 
-Given an intialisation $(\theta^0, q^0)$, the variational EM aims at maximizing the ELBO alternating between two steps: 
+Given an initialization $(\theta^0, q^0)$, the variational EM aims at maximizing the ELBO by alternating between two steps:
 
 -  VE step: update  $q$
 $$
@@ -280,17 +280,17 @@ $$
 $$
 \theta^{t+1}=\underset{\theta}{\arg \max } J_Y(\theta, q^{t+1})
 $$
-Each step is an optimisation problem that needs to be solved using analytical forms or gradient ascent. Note that $q$ is completely determined by $M = (M_i)_{1 \leq i \leq n } \in \mathbb R ^{n\times q}$ and $S = (S_i)_{1 \leq i \leq n } \in \mathbb R ^{n\times q}$, so that $J$ is a function of $(M, S, \beta, \Sigma)$. $q = (M,S)$ are the variational parameters, $\theta = (\beta, \Sigma$) are the model parameters.  
+Each step is an optimisation problem that needs to be solved using analytical forms or gradient ascent. Note that $q$ is completely determined by $M = (M_i)_{1 \leq i \leq n } \in \mathbb R ^{n\times q}$ and $S = (S_i)_{1 \leq i \leq n } \in \mathbb R ^{n\times q}$, so that $J$ is a function of $(M, S, \beta, \Sigma)$. $q = (M,S)$ are the variational parameters, $\theta = (\beta, \Sigma)$ are the model parameters.
 
 
 ##### Case $p = q$
-The case $p=q$ does not perform dimension reduction, but is very fast to compute. 
-Indeed, computations show that the M-step is straightforward in this case as we can update $\Sigma$ and $\beta$ with an analytical form : 
+The case $p=q$ does not perform dimension reduction, but is very fast to compute.
+Indeed, computations show that the M-step is straightforward in this case as we can update $\Sigma$ and $\beta$ with an analytical form:
 
 $$
 \begin{aligned}
 \Sigma^{(t+1)} & = \frac{1}{n} \sum_{i}\left((M^{(t)}-X\beta)_{i} \left((M^{(t)}-X\beta)_{i}\right)^{\top}+S^{(t)}_{i}\right)\\
-\beta^{(t+1)} &= (X^{\top}X)^{-1}X^{\top}M^{(t)} \\ 
+\beta^{(t+1)} &= (X^{\top}X)^{-1}X^{\top}M^{(t)} \\
 \end{aligned}
 $$
 This results in a fast algorithm, since we only need to run a gradient ascent on the variational parameters $M$ and $S$. Practice shows that we only need to do one gradient step on $M$ and $S$, update $\beta$ and $\Sigma$ with their closed forms, then re-perform a gradient step on $M$ and $S$, and so on.
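+
+A toy sketch of this alternating scheme (illustrative only, not the package's actual implementation; the ELBO below is a simplified stand-in built from the terms described above, with hypothetical sizes and fake data):
+
+```
+import torch
+
+torch.manual_seed(0)
+n, p, d = 100, 10, 2                       # hypothetical sizes (p = q case)
+Y = torch.poisson(2 * torch.rand(n, p))    # fake count data
+O = torch.zeros(n, p)                      # offsets
+X = torch.randn(n, d)                      # covariates
+
+M = torch.zeros(n, p, requires_grad=True)  # variational means
+S = torch.ones(n, p, requires_grad=True)   # variational standard deviations
+optim = torch.optim.Adam([M, S], lr=0.05)
+
+def elbo(M, S, beta, Sigma):
+    A = torch.exp(O + M + S**2 / 2)        # E_q[exp(Z)] term of the Poisson part
+    poisson = (Y * (O + M) - A).sum()
+    P = torch.linalg.inv(Sigma)
+    resid = M - X @ beta
+    gauss = -0.5 * ((resid @ P) * resid).sum() \
+            - 0.5 * (torch.diagonal(P) * (S**2).sum(0)).sum() \
+            - 0.5 * n * torch.logdet(Sigma)
+    entropy = 0.5 * torch.log(S**2).sum()  # entropy H(phi)
+    return poisson + gauss + entropy
+
+for _ in range(200):
+    # M step: closed-form updates of beta and Sigma
+    beta = torch.linalg.solve(X.T @ X, X.T @ M.detach())
+    resid = M.detach() - X @ beta
+    Sigma = (resid.T @ resid + torch.diag((S.detach()**2).sum(0))) / n
+    # VE step: one gradient step on the variational parameters M and S
+    loss = -elbo(M, S, beta, Sigma)
+    optim.zero_grad()
+    loss.backward()
+    optim.step()
+```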
@@ -298,20 +298,20 @@ This results in a fast algorithm, since we only need to go a gradient ascent on
 
 ##### Case $q<p$
 
-When $p<q$, we do not have any analytical form and are forced to perform gradient ascent on all the parameters.  Practice shows that we can perform a gradient ascent on all the parameters at a time (doing each VE step and M step perfectly is quite inefficient). 
+When $q<p$, we do not have any analytical forms and are forced to perform gradient ascent on all the parameters. Practice shows that we can perform a gradient ascent on all the parameters at once (solving each VE step and M step exactly is quite inefficient).
 
 
 
 
-## Importance sampling based algorithm 
+## Importance sampling based algorithm
 
-In this section, we try to estimate the gradients with respect to $\theta = (C, \beta) $. 
+In this section, we try to estimate the gradients with respect to $\theta = (C, \beta)$.
 
-One can use importance sampling to estimate the likelihood: 
+One can use importance sampling to estimate the likelihood:
 
  $$p_{\theta}(Y_i) = \int \tilde p_{\theta}^{(u)}(W) \mathrm dW \approx \frac 1 {n_s} \sum_{k=1}^{n_s} \frac {\tilde p_{\theta}^{(u)}(V_k)}{g(V_k)}, ~ ~ ~(V_{k})_{1 \leq k \leq n_s} \overset{iid}{\sim} g$$
- 
-where $g$ is the importance law, $n_s$ is the sampling effort and  
+
+where $g$ is the importance law, $n_s$ is the sampling effort and
 
 
 $$\begin{array}{ll}
@@ -326,8 +326,8 @@ One can do the following approximation:
 
   $$\begin{equation}\label{one integral}
   \nabla _{\theta} \operatorname{log} p_{\theta}(Y_i) \approx \nabla_{\theta} \operatorname{log}\left(\frac 1 {n_s} \sum_{k=1}^{n_s} \frac {\tilde p_{\theta}^{(u)}(V_k)}{g(V_k)}\right)\end{equation}$$
-  
-And derive the gradients formula: 
+
+and derive the gradient formulas:
 
 $$\nabla_{\beta} \operatorname{log} p_{\theta}(Y_i)\approx  X_iY_i^{\top} -\frac{\sum_{k = 1}^{n_s}\frac{\tilde p_{\theta}(V_k)}{g(V_k)}X_i\operatorname{exp}(O_i + \beta^{\top}X_i + CV_k)^{\top}}{\sum_{k = 1}^{n_s}\frac{\tilde p_{\theta}(V_k)}{g(V_k)}} $$
 
@@ -335,5 +335,5 @@ $$\nabla_{C} \operatorname{log} p_{\theta}(Y_i)\approx \frac{\sum_{i = 1}^{n_s}\
 $$$$
 
 
-Given the estimated gradients, we can run a gradient ascent to increase the likelihood. 
-We use algorithm of Variance reduction such as SAGA, SAG or SVRG, implemented in the VR.py file. 
+Given the estimated gradients, we can run a gradient ascent to increase the likelihood.
+We use variance-reduction algorithms such as SAGA, SAG or SVRG, implemented in the VR.py file.
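+
+For intuition, here is a toy sketch of the importance-sampling estimator above, taking the prior $\mathcal N_q(0, I_q)$ of $W$ as the importance law $g$ (illustrative only; the package's actual implementation lives in VR.py):
+
+```
+import math
+import torch
+
+torch.manual_seed(0)
+p, q, n_s = 5, 3, 100_000            # hypothetical sizes
+C = 0.3 * torch.randn(p, q)
+mean = 0.1 * torch.randn(p)          # o_i + beta^T x_i collapsed into one vector
+y = torch.poisson(torch.exp(mean))   # one fake observation Y_i
+
+V = torch.randn(n_s, q)              # V_k drawn iid from g = N(0, I_q)
+Z = mean + V @ C.T                   # latent layer for each draw
+log_pois = (y * Z - torch.exp(Z) - torch.lgamma(y + 1)).sum(1)
+# with g equal to the prior of W, the weight of V_k reduces to p(Y_i | V_k)
+log_p = torch.logsumexp(log_pois, 0) - math.log(n_s)
+print(log_p)                         # estimate of log p_theta(Y_i)
+```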
diff --git a/example_data/real_data/Y_mark.csv b/example_data/real_data/Y_mark.csv
new file mode 100644
index 0000000000000000000000000000000000000000..43c8888780d093037a81430871acfa74648809a2
--- /dev/null
+++ b/example_data/real_data/Y_mark.csv
@@ -0,0 +1,271 @@
+0.0,0.0,0.0,0.0,1.0,3.0,1.0,0.0,2.0,1.0,0.0,1.0,0.0,2.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,12.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,2.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,1.0,0.0,1.0,5.0,0.0,1.0,0.0,0.0,3.0,2.0,2.0,1.0,2.0,2.0,3.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,6.0,4.0,0.0,0.0,0.0,3.0,1.0,0.0,0.0,1.0,0.0,6.0,5.0,0.0,12.0,0.0,3.0,1.0,1.0,0.0,10.0,2.0,2.0,9.0,2.0,15.0,14.0,0.0,6.0,0.0,8.0,9.0,0.0,0.0,17.0,1.0,0.0,1.0
+4.0,0.0,0.0,0.0,2.0,2.0,0.0,4.0,1.0,1.0,1.0,0.0,0.0,1.0,3.0,2.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,4.0,3.0,0.0,0.0,0.0,0.0,2.0,0.0,7.0,0.0,0.0,0.0,3.0,2.0,0.0,7.0,3.0,1.0,0.0,1.0,5.0,0.0,3.0,0.0,0.0,3.0,0.0,2.0,0.0,2.0,1.0,1.0,0.0,3.0,1.0,2.0,0.0,5.0,0.0,10.0,3.0,3.0,0.0,1.0,3.0,4.0,0.0,4.0,3.0,3.0,7.0,7.0,0.0,7.0,1.0,3.0,1.0,0.0,0.0,8.0,0.0,7.0,16.0,8.0,10.0,18.0,1.0,5.0,0.0,7.0,5.0,9.0,0.0,33.0,11.0,0.0,12.0
+0.0,1.0,0.0,4.0,0.0,0.0,1.0,0.0,5.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,3.0,3.0,0.0,0.0,0.0,1.0,5.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,2.0,0.0,3.0,0.0,0.0,6.0,0.0,3.0,3.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,3.0,2.0,0.0,6.0,4.0,0.0,7.0,0.0,0.0,5.0,0.0,1.0,1.0,5.0,1.0,8.0,1.0,10.0,5.0,0.0,3.0,0.0,1.0,1.0,8.0,0.0,4.0,0.0,0.0,1.0
+0.0,3.0,0.0,5.0,0.0,7.0,0.0,2.0,3.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,8.0,6.0,1.0,0.0,2.0,0.0,1.0,0.0,5.0,0.0,8.0,1.0,0.0,0.0,2.0,6.0,3.0,1.0,6.0,3.0,0.0,4.0,12.0,0.0,5.0,0.0,0.0,6.0,1.0,3.0,1.0,3.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,23.0,6.0,0.0,0.0,0.0,13.0,3.0,0.0,9.0,13.0,3.0,21.0,1.0,0.0,5.0,1.0,3.0,3.0,0.0,0.0,3.0,2.0,5.0,20.0,0.0,9.0,19.0,0.0,4.0,0.0,4.0,1.0,7.0,0.0,20.0,7.0,0.0,7.0
+0.0,4.0,0.0,1.0,0.0,3.0,0.0,0.0,3.0,4.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,3.0,1.0,0.0,2.0,4.0,5.0,0.0,2.0,0.0,2.0,0.0,5.0,1.0,5.0,2.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,3.0,0.0,2.0,4.0,0.0,0.0,0.0,3.0,0.0,4.0,2.0,1.0,1.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,17.0,3.0,3.0,0.0,1.0,1.0,2.0,0.0,2.0,14.0,3.0,4.0,6.0,0.0,3.0,0.0,4.0,1.0,0.0,0.0,3.0,0.0,0.0,13.0,2.0,25.0,6.0,0.0,4.0,0.0,1.0,1.0,10.0,0.0,19.0,5.0,0.0,5.0
+1.0,1.0,0.0,1.0,0.0,6.0,0.0,0.0,1.0,2.0,8.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,3.0,0.0,4.0,4.0,3.0,0.0,5.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,8.0,0.0,0.0,4.0,15.0,0.0,13.0,0.0,3.0,2.0,0.0,0.0,32.0,0.0,2.0,16.0,1.0,25.0,9.0,0.0,0.0,0.0,1.0,3.0,17.0,0.0,10.0,0.0,0.0,2.0
+1.0,5.0,0.0,4.0,4.0,4.0,7.0,0.0,18.0,0.0,6.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,3.0,1.0,2.0,1.0,10.0,0.0,8.0,7.0,12.0,7.0,0.0,7.0,2.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,3.0,8.0,2.0,0.0,3.0,0.0,0.0,2.0,0.0,0.0,2.0,2.0,3.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,6.0,0.0,14.0,10.0,0.0,0.0,4.0,2.0,0.0,22.0,0.0,0.0,16.0,15.0,0.0,21.0,0.0,15.0,7.0,2.0,0.0,54.0,0.0,19.0,11.0,0.0,38.0,7.0,0.0,0.0,0.0,11.0,15.0,29.0,0.0,23.0,4.0,0.0,7.0
+1.0,6.0,0.0,9.0,0.0,2.0,1.0,0.0,11.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,11.0,0.0,0.0,1.0,5.0,6.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,11.0,16.0,1.0,2.0,15.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,1.0,8.0,6.0,0.0,0.0,5.0,3.0,0.0,3.0,14.0,0.0,3.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,8.0,3.0,2.0,4.0,14.0,0.0,1.0,0.0,4.0,21.0,11.0,0.0,48.0,7.0,0.0,5.0
+5.0,4.0,0.0,10.0,11.0,7.0,10.0,8.0,5.0,7.0,1.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,4.0,4.0,1.0,1.0,5.0,7.0,15.0,1.0,7.0,7.0,0.0,0.0,7.0,0.0,4.0,0.0,2.0,11.0,6.0,5.0,5.0,13.0,12.0,0.0,4.0,23.0,0.0,8.0,0.0,6.0,8.0,1.0,5.0,0.0,4.0,0.0,2.0,0.0,0.0,6.0,1.0,0.0,3.0,5.0,11.0,18.0,9.0,0.0,0.0,8.0,4.0,0.0,15.0,10.0,11.0,11.0,9.0,0.0,16.0,0.0,13.0,3.0,0.0,3.0,36.0,6.0,5.0,21.0,4.0,33.0,45.0,1.0,0.0,0.0,36.0,45.0,14.0,0.0,86.0,2.0,0.0,8.0
+0.0,6.0,0.0,6.0,3.0,5.0,2.0,2.0,4.0,1.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,5.0,0.0,4.0,11.0,3.0,5.0,3.0,1.0,0.0,5.0,8.0,1.0,1.0,0.0,0.0,15.0,10.0,4.0,0.0,13.0,23.0,0.0,1.0,16.0,0.0,10.0,1.0,1.0,7.0,0.0,12.0,0.0,1.0,0.0,4.0,2.0,1.0,1.0,0.0,0.0,5.0,3.0,4.0,13.0,5.0,0.0,2.0,2.0,3.0,0.0,26.0,5.0,3.0,17.0,22.0,1.0,25.0,0.0,6.0,6.0,0.0,3.0,55.0,4.0,7.0,27.0,1.0,31.0,46.0,2.0,7.0,0.0,5.0,17.0,8.0,0.0,100.0,16.0,0.0,6.0
+0.0,0.0,0.0,2.0,6.0,0.0,2.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,6.0,0.0,4.0,2.0,0.0,0.0,3.0,0.0,0.0,8.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,8.0,11.0,0.0,1.0,0.0,0.0,6.0,0.0,0.0,5.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,5.0,0.0,0.0,1.0,1.0,0.0,0.0,11.0,3.0,0.0,8.0,8.0,0.0,9.0,10.0,4.0,0.0,2.0,3.0,20.0,1.0,2.0,21.0,0.0,25.0,2.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,1.0,6.0,0.0,11.0
+1.0,0.0,0.0,4.0,10.0,2.0,1.0,3.0,3.0,4.0,4.0,0.0,0.0,1.0,1.0,2.0,0.0,0.0,1.0,2.0,2.0,0.0,5.0,15.0,0.0,0.0,1.0,6.0,0.0,8.0,6.0,2.0,1.0,5.0,0.0,5.0,13.0,10.0,0.0,1.0,11.0,0.0,2.0,6.0,4.0,19.0,0.0,2.0,7.0,0.0,16.0,0.0,4.0,3.0,0.0,1.0,1.0,2.0,0.0,0.0,0.0,3.0,5.0,5.0,3.0,1.0,1.0,6.0,1.0,0.0,19.0,2.0,2.0,25.0,29.0,0.0,23.0,4.0,9.0,4.0,0.0,3.0,37.0,6.0,6.0,30.0,1.0,63.0,27.0,2.0,0.0,0.0,12.0,16.0,16.0,5.0,65.0,9.0,0.0,6.0
+0.0,4.0,0.0,1.0,0.0,3.0,0.0,4.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,1.0,4.0,0.0,0.0,0.0,2.0,3.0,0.0,4.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,2.0,2.0,13.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,1.0,1.0,1.0,2.0,2.0,1.0,13.0,0.0,0.0,0.0,1.0,0.0,0.0,8.0,11.0,1.0,11.0,10.0,0.0,6.0,0.0,7.0,0.0,0.0,0.0,0.0,2.0,0.0,15.0,0.0,22.0,7.0,4.0,0.0,0.0,0.0,5.0,6.0,0.0,9.0,2.0,0.0,8.0
+4.0,4.0,0.0,11.0,4.0,11.0,4.0,1.0,8.0,9.0,3.0,1.0,0.0,0.0,0.0,4.0,1.0,0.0,0.0,0.0,10.0,3.0,7.0,0.0,3.0,0.0,6.0,7.0,0.0,0.0,6.0,1.0,1.0,0.0,2.0,1.0,2.0,0.0,5.0,5.0,0.0,0.0,2.0,1.0,0.0,4.0,0.0,0.0,9.0,3.0,5.0,1.0,0.0,8.0,8.0,0.0,1.0,5.0,0.0,1.0,2.0,13.0,4.0,21.0,5.0,0.0,3.0,2.0,10.0,0.0,10.0,4.0,6.0,8.0,19.0,0.0,12.0,0.0,8.0,6.0,1.0,0.0,32.0,12.0,21.0,12.0,17.0,32.0,10.0,0.0,30.0,0.0,12.0,14.0,18.0,0.0,4.0,16.0,0.0,8.0
+0.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,0.0,4.0,4.0,1.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,4.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,5.0,4.0,0.0,0.0,2.0,1.0,0.0,6.0,0.0,0.0,14.0,9.0,0.0,9.0,0.0,12.0,0.0,0.0,2.0,16.0,1.0,6.0,20.0,0.0,8.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0,4.0,0.0,0.0,0.0
+0.0,7.0,0.0,0.0,2.0,3.0,0.0,0.0,8.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,2.0,0.0,1.0,1.0,7.0,3.0,3.0,4.0,1.0,8.0,0.0,2.0,1.0,0.0,2.0,5.0,0.0,0.0,2.0,0.0,1.0,9.0,0.0,0.0,5.0,0.0,0.0,10.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,4.0,2.0,0.0,0.0,0.0,2.0,0.0,6.0,3.0,0.0,0.0,1.0,3.0,0.0,4.0,1.0,0.0,4.0,21.0,0.0,14.0,0.0,19.0,5.0,0.0,0.0,16.0,1.0,19.0,20.0,0.0,11.0,0.0,2.0,4.0,0.0,15.0,22.0,15.0,0.0,0.0,4.0,0.0,2.0
+1.0,2.0,0.0,2.0,1.0,5.0,0.0,0.0,6.0,0.0,4.0,1.0,1.0,0.0,0.0,0.0,6.0,0.0,2.0,0.0,0.0,0.0,11.0,1.0,0.0,4.0,7.0,3.0,0.0,1.0,3.0,1.0,2.0,4.0,0.0,0.0,3.0,0.0,0.0,8.0,4.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,5.0,0.0,0.0,4.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,11.0,3.0,0.0,0.0,1.0,1.0,0.0,7.0,0.0,0.0,14.0,8.0,0.0,7.0,0.0,17.0,2.0,0.0,1.0,6.0,0.0,5.0,14.0,0.0,20.0,17.0,0.0,2.0,0.0,12.0,14.0,15.0,0.0,25.0,1.0,0.0,5.0
+2.0,0.0,0.0,0.0,0.0,4.0,1.0,0.0,2.0,1.0,1.0,1.0,0.0,0.0,0.0,3.0,2.0,0.0,2.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,1.0,0.0,2.0,2.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,2.0,0.0,1.0,1.0,0.0,1.0,0.0,2.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0,3.0,11.0,6.0,1.0,2.0,12.0,0.0,13.0
+8.0,7.0,0.0,6.0,1.0,11.0,3.0,3.0,4.0,16.0,3.0,2.0,0.0,2.0,0.0,9.0,0.0,0.0,5.0,1.0,5.0,8.0,11.0,6.0,2.0,14.0,7.0,8.0,0.0,14.0,11.0,3.0,0.0,28.0,2.0,4.0,8.0,0.0,4.0,7.0,17.0,0.0,7.0,10.0,24.0,2.0,0.0,0.0,29.0,2.0,9.0,4.0,11.0,8.0,8.0,1.0,10.0,2.0,5.0,0.0,8.0,6.0,20.0,25.0,8.0,0.0,2.0,11.0,7.0,0.0,6.0,5.0,10.0,12.0,17.0,0.0,35.0,2.0,32.0,24.0,1.0,3.0,75.0,7.0,40.0,21.0,0.0,8.0,30.0,7.0,32.0,2.0,37.0,23.0,46.0,0.0,116.0,22.0,1.0,19.0
+10.0,14.0,0.0,7.0,1.0,13.0,4.0,4.0,12.0,2.0,13.0,2.0,8.0,4.0,4.0,4.0,0.0,0.0,11.0,6.0,2.0,6.0,18.0,49.0,12.0,21.0,13.0,14.0,0.0,7.0,30.0,7.0,10.0,45.0,11.0,13.0,23.0,0.0,21.0,17.0,34.0,1.0,24.0,13.0,58.0,26.0,0.0,0.0,44.0,14.0,33.0,0.0,29.0,10.0,10.0,3.0,4.0,7.0,3.0,0.0,17.0,16.0,56.0,27.0,17.0,1.0,2.0,36.0,14.0,0.0,17.0,30.0,9.0,13.0,22.0,0.0,38.0,2.0,30.0,39.0,2.0,21.0,28.0,19.0,85.0,38.0,9.0,23.0,79.0,9.0,10.0,0.0,92.0,100.0,106.0,0.0,179.0,30.0,0.0,38.0
+6.0,41.0,0.0,13.0,11.0,18.0,22.0,33.0,10.0,29.0,19.0,8.0,0.0,5.0,1.0,13.0,0.0,0.0,7.0,8.0,4.0,28.0,26.0,28.0,21.0,1.0,23.0,17.0,0.0,14.0,10.0,49.0,12.0,0.0,9.0,53.0,36.0,0.0,7.0,16.0,37.0,0.0,25.0,41.0,0.0,22.0,4.0,0.0,24.0,1.0,29.0,2.0,21.0,14.0,10.0,0.0,15.0,4.0,15.0,0.0,21.0,65.0,70.0,42.0,22.0,0.0,1.0,46.0,31.0,0.0,18.0,35.0,17.0,13.0,32.0,0.0,29.0,3.0,56.0,41.0,4.0,9.0,36.0,26.0,64.0,33.0,19.0,34.0,167.0,3.0,91.0,15.0,87.0,72.0,80.0,1.0,241.0,58.0,6.0,49.0
+9.0,23.0,1.0,18.0,11.0,11.0,15.0,2.0,17.0,26.0,19.0,0.0,0.0,4.0,2.0,10.0,2.0,0.0,0.0,8.0,6.0,2.0,27.0,23.0,7.0,22.0,23.0,28.0,0.0,6.0,5.0,36.0,3.0,0.0,4.0,41.0,35.0,0.0,3.0,20.0,47.0,0.0,8.0,44.0,0.0,25.0,0.0,0.0,20.0,2.0,24.0,0.0,2.0,8.0,1.0,1.0,3.0,7.0,1.0,0.0,5.0,44.0,72.0,44.0,10.0,0.0,0.0,19.0,17.0,0.0,7.0,90.0,5.0,18.0,17.0,0.0,10.0,1.0,30.0,23.0,2.0,0.0,44.0,8.0,29.0,13.0,10.0,24.0,117.0,1.0,25.0,0.0,24.0,69.0,44.0,2.0,215.0,26.0,4.0,28.0
+8.0,27.0,1.0,18.0,37.0,13.0,9.0,11.0,16.0,39.0,22.0,2.0,0.0,13.0,10.0,12.0,2.0,1.0,5.0,5.0,8.0,0.0,29.0,20.0,11.0,0.0,19.0,17.0,1.0,19.0,8.0,67.0,9.0,1.0,4.0,36.0,23.0,0.0,1.0,21.0,36.0,1.0,12.0,61.0,0.0,42.0,0.0,17.0,18.0,10.0,18.0,3.0,7.0,17.0,6.0,1.0,3.0,5.0,6.0,0.0,17.0,65.0,63.0,42.0,19.0,1.0,2.0,21.0,30.0,0.0,21.0,26.0,13.0,15.0,13.0,2.0,21.0,1.0,35.0,43.0,3.0,6.0,28.0,53.0,39.0,31.0,33.0,19.0,176.0,2.0,125.0,0.0,29.0,105.0,67.0,7.0,223.0,58.0,2.0,85.0
+15.0,22.0,0.0,15.0,34.0,1.0,11.0,10.0,18.0,9.0,19.0,4.0,8.0,9.0,1.0,16.0,3.0,3.0,6.0,10.0,19.0,0.0,18.0,25.0,20.0,22.0,12.0,21.0,0.0,18.0,16.0,15.0,7.0,21.0,5.0,8.0,27.0,14.0,13.0,23.0,12.0,9.0,14.0,11.0,1.0,51.0,9.0,29.0,14.0,1.0,33.0,6.0,20.0,9.0,8.0,29.0,16.0,30.0,7.0,1.0,15.0,80.0,15.0,36.0,22.0,9.0,19.0,18.0,48.0,28.0,26.0,30.0,48.0,29.0,38.0,6.0,40.0,10.0,62.0,66.0,16.0,13.0,40.0,69.0,62.0,46.0,40.0,30.0,60.0,65.0,158.0,129.0,59.0,187.0,147.0,138.0,128.0,131.0,4.0,296.0
+5.0,8.0,0.0,11.0,19.0,7.0,12.0,6.0,15.0,6.0,21.0,3.0,0.0,5.0,3.0,13.0,11.0,0.0,0.0,8.0,11.0,6.0,13.0,12.0,13.0,20.0,10.0,17.0,0.0,12.0,5.0,30.0,13.0,1.0,6.0,14.0,10.0,2.0,16.0,13.0,39.0,1.0,9.0,22.0,0.0,27.0,4.0,0.0,14.0,9.0,5.0,7.0,11.0,11.0,8.0,1.0,1.0,11.0,4.0,0.0,24.0,62.0,32.0,17.0,31.0,0.0,7.0,29.0,29.0,3.0,17.0,22.0,9.0,17.0,18.0,2.0,7.0,1.0,29.0,39.0,3.0,3.0,11.0,38.0,34.0,12.0,15.0,14.0,82.0,6.0,151.0,0.0,54.0,116.0,75.0,4.0,172.0,40.0,4.0,35.0
+8.0,23.0,5.0,14.0,4.0,13.0,18.0,5.0,5.0,16.0,21.0,12.0,0.0,13.0,0.0,8.0,16.0,0.0,4.0,7.0,4.0,20.0,22.0,0.0,4.0,2.0,19.0,12.0,0.0,10.0,2.0,20.0,10.0,1.0,16.0,27.0,10.0,1.0,9.0,12.0,37.0,0.0,15.0,35.0,0.0,2.0,14.0,0.0,18.0,9.0,6.0,17.0,13.0,5.0,14.0,0.0,28.0,5.0,18.0,0.0,5.0,44.0,70.0,29.0,18.0,4.0,5.0,32.0,14.0,5.0,21.0,50.0,11.0,12.0,31.0,2.0,16.0,0.0,46.0,22.0,22.0,6.0,6.0,22.0,17.0,26.0,42.0,36.0,128.0,6.0,15.0,2.0,55.0,85.0,36.0,2.0,184.0,41.0,16.0,27.0
+13.0,23.0,17.0,8.0,0.0,4.0,11.0,11.0,10.0,8.0,6.0,8.0,0.0,13.0,1.0,13.0,0.0,75.0,8.0,9.0,9.0,7.0,10.0,0.0,8.0,3.0,13.0,15.0,0.0,12.0,11.0,10.0,11.0,0.0,1.0,3.0,4.0,6.0,6.0,14.0,13.0,2.0,11.0,7.0,0.0,13.0,7.0,5.0,20.0,2.0,8.0,6.0,13.0,6.0,1.0,0.0,18.0,2.0,5.0,0.0,19.0,35.0,13.0,15.0,10.0,2.0,5.0,10.0,10.0,2.0,11.0,25.0,9.0,11.0,15.0,0.0,8.0,4.0,23.0,8.0,5.0,28.0,5.0,26.0,22.0,21.0,24.0,17.0,50.0,7.0,20.0,1.0,60.0,50.0,51.0,3.0,73.0,65.0,6.0,104.0
+12.0,21.0,1.0,15.0,18.0,4.0,4.0,15.0,7.0,12.0,12.0,0.0,1.0,1.0,0.0,2.0,1.0,0.0,6.0,12.0,15.0,1.0,4.0,10.0,10.0,15.0,5.0,9.0,0.0,34.0,9.0,4.0,3.0,3.0,2.0,4.0,31.0,19.0,4.0,11.0,8.0,11.0,8.0,7.0,22.0,36.0,0.0,40.0,18.0,6.0,21.0,9.0,5.0,32.0,1.0,15.0,15.0,7.0,8.0,0.0,25.0,34.0,16.0,59.0,8.0,0.0,5.0,22.0,24.0,21.0,56.0,23.0,29.0,30.0,61.0,10.0,36.0,12.0,92.0,39.0,7.0,22.0,50.0,79.0,36.0,50.0,22.0,79.0,30.0,22.0,21.0,1.0,33.0,66.0,84.0,49.0,80.0,40.0,2.0,117.0
+7.0,1.0,0.0,4.0,1.0,1.0,2.0,3.0,1.0,0.0,1.0,0.0,3.0,0.0,2.0,2.0,0.0,0.0,0.0,3.0,3.0,0.0,6.0,1.0,2.0,7.0,3.0,3.0,0.0,10.0,4.0,1.0,0.0,2.0,0.0,3.0,2.0,8.0,1.0,3.0,2.0,1.0,0.0,4.0,59.0,5.0,1.0,22.0,1.0,3.0,1.0,0.0,4.0,7.0,0.0,6.0,3.0,3.0,0.0,0.0,8.0,11.0,3.0,22.0,4.0,0.0,6.0,4.0,3.0,5.0,18.0,15.0,7.0,9.0,18.0,2.0,12.0,0.0,19.0,11.0,1.0,1.0,0.0,15.0,6.0,14.0,0.0,16.0,19.0,12.0,3.0,0.0,13.0,33.0,18.0,7.0,28.0,10.0,1.0,21.0
+1.0,6.0,0.0,3.0,6.0,1.0,2.0,3.0,3.0,0.0,7.0,0.0,0.0,2.0,0.0,3.0,2.0,0.0,2.0,1.0,0.0,0.0,14.0,5.0,3.0,9.0,6.0,3.0,1.0,8.0,3.0,0.0,3.0,0.0,0.0,0.0,8.0,0.0,1.0,3.0,6.0,0.0,5.0,4.0,0.0,18.0,0.0,1.0,15.0,4.0,6.0,0.0,3.0,3.0,1.0,1.0,5.0,7.0,3.0,0.0,2.0,11.0,1.0,5.0,7.0,0.0,0.0,2.0,8.0,7.0,18.0,0.0,2.0,15.0,10.0,0.0,3.0,0.0,9.0,12.0,0.0,2.0,10.0,16.0,15.0,8.0,3.0,22.0,14.0,15.0,0.0,4.0,5.0,25.0,21.0,2.0,17.0,20.0,104.0,4.0
+2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,3.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,3.0,0.0,2.0,0.0,2.0,1.0,1.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,2.0,0.0,2.0,4.0,0.0,4.0,9.0,0.0,2.0,1.0,2.0,4.0,0.0,2.0,0.0,1.0,1.0,1.0,0.0,0.0,3.0,1.0,5.0,2.0,1.0,9.0,1.0,4.0,6.0,3.0,1.0,2.0,1.0,5.0,5.0,13.0,1.0,7.0,7.0,14.0,10.0,14.0
+2.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,2.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,4.0,1.0,1.0,3.0,5.0,1.0,1.0,6.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,3.0,0.0,0.0,3.0,1.0,0.0,0.0,2.0,0.0,0.0,2.0,1.0,0.0,3.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,0.0,2.0,1.0,0.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,8.0,1.0,3.0,0.0,6.0,7.0,1.0,1.0,12.0,10.0,6.0,1.0,3.0,1.0,2.0,0.0,2.0,15.0,9.0,10.0,9.0,5.0,6.0,6.0,15.0,11.0
+27.0,12.0,3.0,3.0,7.0,14.0,13.0,16.0,5.0,2.0,16.0,6.0,2.0,5.0,10.0,16.0,10.0,1.0,5.0,18.0,17.0,7.0,18.0,4.0,27.0,16.0,3.0,8.0,0.0,6.0,24.0,9.0,14.0,3.0,8.0,7.0,7.0,15.0,7.0,20.0,11.0,14.0,15.0,8.0,0.0,26.0,8.0,10.0,15.0,57.0,16.0,4.0,25.0,6.0,7.0,15.0,16.0,18.0,14.0,1.0,18.0,24.0,15.0,15.0,31.0,27.0,6.0,29.0,23.0,15.0,20.0,10.0,29.0,20.0,21.0,5.0,31.0,0.0,40.0,41.0,4.0,18.0,14.0,38.0,59.0,38.0,50.0,31.0,62.0,26.0,64.0,71.0,109.0,169.0,152.0,47.0,98.0,71.0,66.0,204.0
+0.0,2.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,1.0,0.0,1.0,2.0,1.0,1.0,2.0,2.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,4.0,1.0,1.0,2.0,0.0,0.0,2.0,1.0,2.0,1.0,1.0,1.0,3.0,2.0,1.0,2.0,1.0,0.0,0.0,2.0,3.0,4.0,1.0,2.0,3.0,1.0,1.0,1.0,0.0,2.0,2.0,1.0,0.0,1.0,3.0,4.0,1.0,2.0,1.0,3.0,7.0,2.0,1.0,3.0,9.0,6.0,2.0,9.0,11.0,6.0,16.0
+3.0,6.0,0.0,2.0,3.0,4.0,2.0,2.0,3.0,4.0,11.0,0.0,0.0,0.0,2.0,0.0,3.0,0.0,0.0,1.0,1.0,1.0,3.0,1.0,6.0,0.0,2.0,4.0,0.0,4.0,1.0,1.0,2.0,1.0,0.0,2.0,5.0,0.0,0.0,0.0,10.0,1.0,3.0,6.0,0.0,18.0,1.0,0.0,6.0,2.0,3.0,1.0,0.0,3.0,3.0,0.0,2.0,4.0,1.0,0.0,5.0,4.0,1.0,18.0,8.0,0.0,1.0,1.0,4.0,0.0,6.0,6.0,6.0,12.0,19.0,1.0,6.0,0.0,16.0,6.0,8.0,0.0,1.0,7.0,8.0,15.0,4.0,8.0,44.0,5.0,6.0,0.0,21.0,30.0,20.0,4.0,13.0,17.0,18.0,16.0
+2.0,2.0,1.0,3.0,2.0,1.0,4.0,4.0,0.0,1.0,1.0,2.0,0.0,1.0,2.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,2.0,5.0,2.0,2.0,1.0,2.0,0.0,0.0,2.0,0.0,1.0,0.0,2.0,0.0,1.0,0.0,1.0,2.0,6.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,4.0,3.0,2.0,0.0,1.0,1.0,3.0,0.0,1.0,0.0,1.0,1.0,3.0,0.0,1.0,3.0,3.0,4.0,4.0,5.0,5.0,2.0,5.0,3.0,4.0,5.0,11.0,1.0,6.0,2.0,5.0,5.0,4.0,5.0,10.0,7.0,8.0,6.0,7.0,6.0,9.0,5.0,9.0,14.0,9.0,8.0,7.0,8.0,19.0,25.0
+13.0,13.0,2.0,4.0,13.0,14.0,7.0,13.0,13.0,3.0,15.0,4.0,20.0,0.0,1.0,10.0,11.0,0.0,12.0,17.0,18.0,1.0,10.0,2.0,11.0,6.0,12.0,16.0,2.0,15.0,9.0,3.0,6.0,8.0,5.0,5.0,12.0,13.0,16.0,8.0,5.0,6.0,16.0,10.0,10.0,3.0,6.0,7.0,6.0,2.0,4.0,10.0,13.0,1.0,8.0,14.0,26.0,19.0,37.0,0.0,11.0,33.0,21.0,22.0,26.0,25.0,21.0,32.0,39.0,50.0,24.0,20.0,18.0,37.0,27.0,16.0,16.0,2.0,56.0,34.0,34.0,17.0,6.0,48.0,65.0,48.0,29.0,39.0,33.0,33.0,64.0,35.0,95.0,146.0,117.0,64.0,80.0,75.0,48.0,215.0
+3.0,2.0,9.0,4.0,0.0,1.0,2.0,11.0,1.0,0.0,3.0,7.0,9.0,4.0,2.0,7.0,1.0,0.0,4.0,12.0,7.0,0.0,2.0,0.0,6.0,2.0,2.0,5.0,2.0,12.0,2.0,1.0,2.0,0.0,7.0,3.0,0.0,1.0,5.0,4.0,9.0,7.0,13.0,6.0,14.0,1.0,9.0,0.0,3.0,4.0,2.0,19.0,12.0,0.0,6.0,9.0,13.0,10.0,10.0,5.0,8.0,17.0,6.0,9.0,9.0,8.0,4.0,3.0,9.0,25.0,19.0,11.0,16.0,16.0,18.0,12.0,7.0,0.0,18.0,20.0,27.0,18.0,7.0,35.0,16.0,13.0,9.0,7.0,17.0,23.0,11.0,13.0,28.0,38.0,60.0,40.0,36.0,28.0,156.0,90.0
+1.0,0.0,2.0,0.0,0.0,0.0,1.0,3.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,5.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,4.0,5.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,0.0,5.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,4.0,0.0,2.0,0.0,1.0,1.0,3.0,1.0,0.0,3.0,6.0,0.0,1.0,0.0,1.0,2.0,4.0,0.0,1.0,4.0,3.0,12.0,3.0,0.0,2.0,1.0,1.0,2.0,1.0,1.0,4.0,2.0,6.0,5.0,2.0,7.0,1.0,0.0,5.0,3.0,10.0,9.0,4.0,10.0,55.0,9.0
+0.0,0.0,0.0,2.0,1.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,3.0,0.0,0.0,1.0,0.0,1.0,2.0,0.0,2.0,0.0,1.0,3.0,1.0,1.0,4.0,1.0,3.0,0.0,0.0,1.0,3.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,4.0,3.0,0.0,0.0,1.0,1.0,7.0,2.0,1.0,2.0,2.0,2.0,1.0,3.0,1.0,3.0,2.0,4.0,2.0,6.0,1.0,3.0,4.0,2.0,3.0,1.0,0.0,8.0,3.0,4.0,8.0,8.0,2.0,2.0,2.0,14.0,10.0,5.0,2.0,7.0,2.0,17.0,16.0
+1.0,1.0,1.0,2.0,0.0,1.0,0.0,0.0,3.0,2.0,0.0,2.0,0.0,1.0,0.0,7.0,2.0,1.0,1.0,0.0,2.0,1.0,6.0,3.0,6.0,1.0,2.0,0.0,1.0,3.0,3.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,1.0,5.0,5.0,6.0,0.0,1.0,1.0,4.0,2.0,1.0,2.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,2.0,1.0,1.0,0.0,1.0,1.0,2.0,2.0,3.0,2.0,2.0,3.0,4.0,5.0,6.0,1.0,2.0,14.0,16.0,4.0,3.0,5.0,10.0,5.0,2.0,5.0,1.0,8.0,3.0,20.0,3.0,11.0,5.0,4.0,2.0,3.0,16.0,24.0,10.0,4.0,11.0,14.0,24.0,35.0
+0.0,0.0,2.0,0.0,0.0,1.0,1.0,3.0,0.0,0.0,1.0,0.0,0.0,2.0,1.0,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,1.0,3.0,2.0,0.0,1.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,3.0,0.0,4.0,1.0,5.0,1.0,5.0,8.0,0.0,0.0,0.0,3.0,0.0,1.0,1.0,1.0,1.0,1.0,6.0,0.0,1.0,0.0,5.0,5.0,3.0,0.0,7.0,4.0,4.0,4.0,0.0,3.0,2.0,2.0,5.0,4.0,2.0,6.0,0.0,3.0,6.0,6.0,11.0,5.0,2.0,11.0,54.0,25.0
+1.0,2.0,0.0,2.0,2.0,0.0,0.0,0.0,1.0,2.0,2.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,2.0,2.0,3.0,0.0,3.0,0.0,2.0,0.0,2.0,1.0,0.0,0.0,1.0,0.0,2.0,0.0,3.0,0.0,2.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,2.0,3.0,2.0,0.0,1.0,0.0,2.0,3.0,4.0,0.0,0.0,0.0,4.0,0.0,1.0,2.0,2.0,2.0,3.0,1.0,1.0,0.0,3.0,6.0,6.0,2.0,1.0,4.0,5.0,4.0,2.0,1.0,7.0,3.0,3.0,3.0,7.0,9.0,17.0,1.0,3.0,7.0,20.0,11.0
+0.0,0.0,0.0,1.0,0.0,1.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,1.0,1.0,2.0,2.0,2.0,0.0,2.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,2.0,1.0,0.0,0.0,1.0,0.0,0.0,7.0,1.0,0.0,0.0,2.0,3.0,0.0,1.0,1.0,4.0,0.0,1.0,1.0,2.0,0.0,2.0,1.0,2.0,0.0,1.0,0.0,2.0,4.0,2.0,1.0,1.0,9.0,1.0,0.0,0.0,0.0,4.0,3.0,0.0,6.0,2.0,2.0,3.0,3.0,1.0,10.0,7.0,8.0,4.0,9.0,8.0,16.0,19.0
+9.0,13.0,5.0,6.0,8.0,7.0,5.0,13.0,9.0,1.0,17.0,6.0,28.0,16.0,6.0,17.0,5.0,0.0,3.0,11.0,27.0,6.0,13.0,1.0,10.0,25.0,11.0,13.0,0.0,18.0,19.0,6.0,18.0,7.0,13.0,14.0,7.0,11.0,16.0,15.0,20.0,22.0,14.0,15.0,1.0,36.0,34.0,11.0,6.0,8.0,37.0,38.0,29.0,38.0,15.0,27.0,27.0,26.0,24.0,5.0,17.0,36.0,25.0,42.0,36.0,31.0,16.0,23.0,43.0,91.0,33.0,14.0,43.0,30.0,22.0,37.0,17.0,0.0,51.0,41.0,66.0,33.0,49.0,123.0,80.0,32.0,77.0,41.0,60.0,108.0,57.0,88.0,78.0,150.0,137.0,125.0,135.0,272.0,101.0,376.0
+12.0,4.0,0.0,1.0,4.0,2.0,13.0,15.0,8.0,10.0,7.0,10.0,7.0,1.0,3.0,17.0,2.0,1.0,10.0,10.0,10.0,0.0,10.0,18.0,12.0,11.0,4.0,6.0,1.0,12.0,11.0,4.0,7.0,5.0,4.0,5.0,41.0,13.0,11.0,7.0,11.0,11.0,16.0,5.0,32.0,24.0,2.0,13.0,11.0,6.0,40.0,2.0,8.0,8.0,11.0,12.0,9.0,11.0,14.0,1.0,17.0,26.0,13.0,35.0,28.0,5.0,7.0,19.0,19.0,22.0,15.0,15.0,19.0,18.0,32.0,12.0,19.0,50.0,47.0,32.0,19.0,43.0,12.0,85.0,29.0,26.0,23.0,52.0,23.0,61.0,11.0,15.0,80.0,79.0,59.0,33.0,92.0,101.0,10.0,303.0
+13.0,19.0,17.0,12.0,13.0,4.0,23.0,15.0,10.0,2.0,12.0,13.0,5.0,11.0,3.0,16.0,9.0,4.0,13.0,18.0,21.0,1.0,9.0,18.0,16.0,30.0,12.0,7.0,15.0,31.0,18.0,2.0,13.0,42.0,7.0,3.0,18.0,6.0,16.0,13.0,4.0,23.0,14.0,3.0,2.0,2.0,18.0,4.0,13.0,3.0,16.0,12.0,16.0,16.0,9.0,43.0,30.0,42.0,23.0,1.0,14.0,46.0,3.0,34.0,64.0,10.0,33.0,19.0,47.0,62.0,53.0,9.0,65.0,28.0,45.0,86.0,49.0,11.0,69.0,67.0,60.0,65.0,23.0,38.0,93.0,74.0,145.0,35.0,20.0,114.0,107.0,142.0,61.0,147.0,208.0,132.0,35.0,255.0,188.0,351.0
+2.0,5.0,0.0,3.0,3.0,5.0,5.0,6.0,3.0,7.0,12.0,6.0,6.0,1.0,2.0,4.0,2.0,1.0,0.0,8.0,18.0,1.0,3.0,8.0,3.0,12.0,3.0,1.0,1.0,18.0,3.0,7.0,4.0,0.0,6.0,10.0,8.0,7.0,7.0,8.0,5.0,10.0,1.0,3.0,0.0,6.0,10.0,6.0,6.0,6.0,2.0,4.0,4.0,1.0,2.0,8.0,8.0,10.0,13.0,1.0,3.0,28.0,18.0,13.0,8.0,0.0,2.0,8.0,16.0,18.0,26.0,6.0,28.0,21.0,18.0,15.0,6.0,0.0,26.0,23.0,22.0,35.0,6.0,28.0,27.0,24.0,37.0,12.0,62.0,30.0,48.0,22.0,10.0,78.0,72.0,53.0,83.0,92.0,2.0,190.0
+11.0,10.0,37.0,12.0,12.0,5.0,8.0,7.0,2.0,4.0,10.0,25.0,0.0,19.0,8.0,10.0,5.0,39.0,8.0,11.0,8.0,2.0,8.0,4.0,14.0,4.0,6.0,6.0,1.0,14.0,7.0,9.0,22.0,4.0,27.0,3.0,3.0,8.0,32.0,9.0,10.0,24.0,18.0,5.0,2.0,17.0,19.0,8.0,7.0,13.0,9.0,24.0,22.0,4.0,15.0,10.0,19.0,24.0,27.0,93.0,7.0,26.0,15.0,13.0,23.0,4.0,9.0,16.0,14.0,23.0,48.0,15.0,22.0,12.0,36.0,15.0,23.0,2.0,29.0,33.0,42.0,21.0,17.0,44.0,21.0,48.0,72.0,17.0,14.0,20.0,20.0,81.0,90.0,83.0,66.0,77.0,62.0,100.0,145.0,139.0
+19.0,11.0,5.0,7.0,7.0,3.0,34.0,39.0,7.0,0.0,8.0,20.0,1.0,5.0,93.0,25.0,8.0,9.0,7.0,27.0,25.0,8.0,20.0,6.0,37.0,24.0,10.0,10.0,1.0,20.0,21.0,7.0,18.0,3.0,16.0,10.0,13.0,0.0,20.0,23.0,17.0,49.0,53.0,5.0,0.0,35.0,14.0,0.0,31.0,104.0,28.0,17.0,31.0,3.0,21.0,24.0,29.0,41.0,46.0,2.0,30.0,30.0,17.0,33.0,68.0,10.0,35.0,37.0,72.0,56.0,44.0,26.0,70.0,34.0,56.0,31.0,52.0,7.0,59.0,120.0,46.0,74.0,30.0,100.0,106.0,73.0,127.0,46.0,73.0,210.0,104.0,190.0,199.0,212.0,250.0,120.0,119.0,141.0,159.0,533.0
+8.0,4.0,2.0,5.0,0.0,2.0,7.0,6.0,1.0,9.0,3.0,5.0,0.0,3.0,3.0,8.0,3.0,0.0,10.0,6.0,28.0,1.0,4.0,8.0,6.0,21.0,10.0,6.0,3.0,3.0,13.0,0.0,4.0,0.0,3.0,11.0,11.0,7.0,5.0,9.0,5.0,6.0,9.0,7.0,6.0,9.0,14.0,5.0,5.0,2.0,16.0,14.0,5.0,9.0,8.0,6.0,3.0,22.0,13.0,0.0,8.0,18.0,13.0,20.0,20.0,5.0,9.0,14.0,22.0,29.0,3.0,11.0,35.0,3.0,4.0,16.0,5.0,159.0,11.0,26.0,33.0,53.0,84.0,78.0,43.0,12.0,55.0,7.0,33.0,72.0,57.0,53.0,59.0,72.0,87.0,42.0,46.0,186.0,29.0,358.0
+15.0,16.0,9.0,6.0,8.0,5.0,16.0,15.0,2.0,7.0,2.0,18.0,13.0,20.0,4.0,15.0,5.0,18.0,13.0,11.0,10.0,9.0,10.0,6.0,23.0,20.0,5.0,13.0,0.0,22.0,9.0,0.0,32.0,1.0,13.0,0.0,15.0,12.0,13.0,16.0,2.0,16.0,45.0,6.0,16.0,14.0,3.0,15.0,14.0,7.0,19.0,7.0,10.0,12.0,11.0,14.0,6.0,26.0,15.0,3.0,12.0,29.0,4.0,15.0,39.0,1.0,17.0,38.0,26.0,17.0,41.0,6.0,18.0,19.0,57.0,4.0,33.0,4.0,53.0,39.0,56.0,43.0,82.0,66.0,41.0,37.0,33.0,75.0,14.0,101.0,38.0,190.0,189.0,155.0,81.0,26.0,53.0,109.0,412.0,344.0
+21.0,21.0,1.0,13.0,7.0,6.0,25.0,23.0,11.0,3.0,18.0,15.0,0.0,30.0,3.0,21.0,29.0,1.0,10.0,26.0,31.0,0.0,13.0,4.0,16.0,33.0,20.0,12.0,34.0,23.0,26.0,6.0,16.0,6.0,68.0,11.0,8.0,4.0,30.0,18.0,14.0,22.0,18.0,6.0,8.0,14.0,30.0,13.0,15.0,5.0,20.0,40.0,36.0,6.0,50.0,37.0,34.0,35.0,47.0,1.0,32.0,47.0,18.0,46.0,41.0,22.0,12.0,37.0,56.0,105.0,53.0,31.0,63.0,34.0,52.0,34.0,43.0,1.0,67.0,56.0,126.0,95.0,27.0,113.0,68.0,48.0,139.0,47.0,59.0,131.0,63.0,80.0,118.0,151.0,125.0,130.0,143.0,355.0,333.0,589.0
+9.0,3.0,13.0,1.0,1.0,4.0,9.0,10.0,2.0,5.0,5.0,6.0,0.0,8.0,0.0,7.0,3.0,0.0,10.0,13.0,9.0,0.0,3.0,8.0,11.0,1.0,1.0,2.0,5.0,16.0,4.0,2.0,4.0,1.0,6.0,6.0,0.0,3.0,7.0,7.0,7.0,15.0,11.0,6.0,0.0,5.0,11.0,0.0,9.0,1.0,13.0,11.0,10.0,5.0,6.0,11.0,15.0,12.0,13.0,10.0,7.0,15.0,3.0,10.0,14.0,8.0,5.0,8.0,12.0,26.0,24.0,7.0,27.0,14.0,29.0,14.0,16.0,1.0,51.0,29.0,37.0,32.0,0.0,35.0,35.0,28.0,17.0,32.0,31.0,51.0,67.0,31.0,74.0,64.0,63.0,53.0,49.0,54.0,65.0,136.0
+14.0,21.0,0.0,28.0,30.0,10.0,81.0,12.0,14.0,26.0,15.0,0.0,0.0,5.0,69.0,9.0,0.0,0.0,27.0,28.0,0.0,1.0,31.0,11.0,11.0,7.0,19.0,12.0,0.0,63.0,29.0,1.0,8.0,1.0,3.0,0.0,5.0,0.0,12.0,27.0,1.0,3.0,20.0,0.0,0.0,16.0,3.0,0.0,99.0,95.0,7.0,0.0,9.0,10.0,15.0,1.0,32.0,1.0,3.0,5.0,8.0,93.0,2.0,44.0,24.0,0.0,6.0,17.0,30.0,0.0,47.0,4.0,1.0,47.0,125.0,0.0,65.0,0.0,160.0,39.0,4.0,6.0,66.0,23.0,39.0,111.0,50.0,185.0,9.0,67.0,10.0,4.0,119.0,86.0,97.0,3.0,10.0,43.0,10.0,12.0
+9.0,15.0,0.0,10.0,0.0,3.0,8.0,5.0,3.0,1.0,11.0,4.0,0.0,0.0,1.0,6.0,4.0,0.0,2.0,3.0,14.0,2.0,10.0,3.0,10.0,1.0,9.0,5.0,0.0,15.0,7.0,5.0,12.0,0.0,10.0,0.0,9.0,0.0,6.0,12.0,18.0,0.0,17.0,6.0,1.0,8.0,2.0,0.0,19.0,6.0,10.0,3.0,7.0,5.0,4.0,0.0,2.0,9.0,4.0,0.0,7.0,27.0,16.0,21.0,10.0,0.0,3.0,15.0,36.0,0.0,19.0,26.0,8.0,22.0,24.0,1.0,22.0,5.0,28.0,27.0,2.0,2.0,16.0,54.0,25.0,26.0,3.0,23.0,27.0,0.0,29.0,2.0,64.0,38.0,70.0,1.0,53.0,44.0,0.0,59.0
+2.0,13.0,0.0,9.0,6.0,7.0,4.0,5.0,2.0,0.0,11.0,1.0,0.0,0.0,0.0,5.0,0.0,0.0,1.0,1.0,0.0,0.0,8.0,10.0,5.0,0.0,11.0,6.0,0.0,5.0,1.0,2.0,0.0,0.0,0.0,4.0,4.0,0.0,1.0,8.0,4.0,1.0,2.0,2.0,0.0,5.0,0.0,0.0,2.0,1.0,2.0,0.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,5.0,22.0,9.0,18.0,2.0,0.0,2.0,2.0,15.0,1.0,7.0,3.0,4.0,8.0,8.0,0.0,8.0,0.0,13.0,8.0,0.0,3.0,7.0,2.0,13.0,11.0,1.0,13.0,13.0,0.0,1.0,0.0,16.0,22.0,44.0,1.0,46.0,9.0,0.0,2.0
+9.0,11.0,1.0,4.0,4.0,10.0,10.0,3.0,7.0,12.0,13.0,1.0,0.0,4.0,0.0,5.0,9.0,0.0,8.0,8.0,10.0,12.0,10.0,0.0,17.0,5.0,17.0,6.0,1.0,39.0,4.0,9.0,7.0,0.0,0.0,6.0,22.0,0.0,4.0,11.0,29.0,2.0,6.0,19.0,0.0,29.0,2.0,0.0,6.0,17.0,22.0,0.0,12.0,6.0,6.0,1.0,0.0,22.0,5.0,0.0,13.0,39.0,20.0,28.0,16.0,0.0,1.0,14.0,12.0,2.0,36.0,24.0,9.0,18.0,40.0,0.0,27.0,4.0,51.0,12.0,4.0,8.0,9.0,26.0,24.0,38.0,12.0,40.0,60.0,1.0,36.0,2.0,106.0,40.0,32.0,1.0,101.0,19.0,8.0,44.0
+2.0,2.0,0.0,6.0,0.0,5.0,5.0,0.0,15.0,2.0,3.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,16.0,0.0,0.0,6.0,2.0,5.0,0.0,6.0,1.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,9.0,3.0,0.0,1.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,4.0,16.0,0.0,0.0,0.0,0.0,0.0,0.0,14.0,0.0,0.0,4.0,19.0,0.0,24.0,0.0,17.0,3.0,0.0,0.0,24.0,0.0,3.0,26.0,0.0,16.0,11.0,0.0,0.0,0.0,0.0,7.0,4.0,0.0,19.0,5.0,0.0,4.0
+0.0,3.0,0.0,12.0,1.0,8.0,1.0,0.0,14.0,3.0,8.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,1.0,10.0,4.0,0.0,5.0,1.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,7.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,5.0,1.0,0.0,0.0,6.0,0.0,0.0,5.0,0.0,0.0,4.0,3.0,0.0,12.0,0.0,4.0,0.0,0.0,0.0,27.0,0.0,4.0,15.0,0.0,8.0,15.0,0.0,0.0,0.0,4.0,2.0,3.0,0.0,17.0,2.0,0.0,1.0
+0.0,2.0,0.0,11.0,1.0,14.0,2.0,0.0,12.0,0.0,13.0,1.0,0.0,0.0,2.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,2.0,17.0,12.0,0.0,6.0,1.0,2.0,3.0,0.0,0.0,0.0,2.0,0.0,2.0,11.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,7.0,1.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,17.0,9.0,0.0,16.0,0.0,4.0,2.0,0.0,0.0,36.0,0.0,2.0,15.0,0.0,7.0,3.0,0.0,0.0,0.0,3.0,1.0,14.0,0.0,14.0,1.0,0.0,1.0
+1.0,5.0,0.0,9.0,0.0,8.0,1.0,0.0,5.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,4.0,0.0,0.0,3.0,6.0,5.0,0.0,9.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,11.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,6.0,3.0,0.0,6.0,0.0,0.0,4.0,5.0,0.0,16.0,0.0,2.0,0.0,0.0,0.0,11.0,0.0,1.0,7.0,0.0,10.0,1.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,12.0,1.0,0.0,0.0
+0.0,7.0,0.0,6.0,0.0,10.0,0.0,0.0,8.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,12.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,6.0,1.0,0.0,0.0,0.0,3.0,0.0,2.0,0.0,0.0,5.0,1.0,0.0,1.0,0.0,3.0,2.0,0.0,0.0,9.0,0.0,11.0,3.0,0.0,7.0,5.0,0.0,0.0,0.0,3.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0
+1.0,1.0,0.0,8.0,0.0,10.0,0.0,0.0,14.0,0.0,10.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,2.0,9.0,3.0,0.0,1.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,14.0,2.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,14.0,0.0,0.0,2.0,0.0,3.0,2.0,0.0,0.0,0.0,0.0,4.0,2.0,0.0,6.0,1.0,0.0,5.0
+0.0,9.0,0.0,16.0,0.0,13.0,1.0,0.0,17.0,0.0,4.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,14.0,0.0,0.0,0.0,11.0,1.0,0.0,6.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,8.0,2.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,4.0,2.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,32.0,7.0,0.0,18.0,0.0,12.0,3.0,0.0,0.0,23.0,3.0,3.0,11.0,0.0,12.0,3.0,0.0,0.0,0.0,4.0,1.0,9.0,0.0,19.0,1.0,0.0,1.0
+0.0,3.0,0.0,6.0,1.0,3.0,0.0,0.0,13.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.0,9.0,7.0,5.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,5.0,2.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,8.0,0.0,0.0,0.0,3.0,0.0,0.0,3.0,0.0,0.0,9.0,13.0,0.0,25.0,0.0,7.0,0.0,0.0,0.0,25.0,0.0,0.0,24.0,0.0,8.0,3.0,0.0,0.0,0.0,2.0,1.0,3.0,0.0,19.0,0.0,0.0,1.0
+5.0,4.0,0.0,6.0,0.0,15.0,2.0,0.0,5.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,1.0,1.0,19.0,2.0,0.0,1.0,6.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,1.0,4.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,8.0,13.0,0.0,11.0,0.0,4.0,0.0,0.0,0.0,11.0,0.0,2.0,8.0,0.0,15.0,3.0,0.0,0.0,0.0,4.0,0.0,1.0,0.0,10.0,1.0,0.0,1.0
+1.0,7.0,0.0,6.0,0.0,3.0,0.0,1.0,5.0,0.0,2.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,1.0,0.0,4.0,9.0,0.0,1.0,1.0,5.0,1.0,1.0,0.0,12.0,0.0,0.0,1.0,6.0,3.0,0.0,0.0,12.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,6.0,5.0,0.0,0.0,0.0,3.0,8.0,0.0,6.0,0.0,0.0,10.0,8.0,0.0,7.0,0.0,8.0,2.0,1.0,0.0,3.0,2.0,7.0,19.0,3.0,14.0,19.0,3.0,0.0,0.0,1.0,6.0,10.0,0.0,24.0,1.0,0.0,6.0
+2.0,3.0,0.0,5.0,0.0,6.0,0.0,0.0,9.0,1.0,7.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,4.0,7.0,0.0,2.0,4.0,10.0,0.0,8.0,4.0,4.0,2.0,0.0,0.0,5.0,0.0,1.0,0.0,2.0,8.0,0.0,0.0,10.0,0.0,1.0,0.0,6.0,1.0,0.0,2.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,4.0,3.0,7.0,16.0,1.0,1.0,3.0,3.0,0.0,0.0,6.0,1.0,2.0,18.0,20.0,0.0,7.0,0.0,8.0,0.0,0.0,1.0,13.0,5.0,2.0,27.0,0.0,20.0,23.0,0.0,0.0,0.0,1.0,7.0,3.0,0.0,38.0,2.0,0.0,12.0
+0.0,8.0,0.0,7.0,2.0,10.0,0.0,0.0,3.0,0.0,7.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,5.0,0.0,3.0,11.0,6.0,0.0,5.0,1.0,4.0,1.0,0.0,0.0,2.0,0.0,3.0,0.0,8.0,1.0,2.0,0.0,3.0,0.0,0.0,0.0,3.0,0.0,1.0,0.0,0.0,3.0,5.0,1.0,0.0,0.0,0.0,0.0,0.0,4.0,3.0,4.0,9.0,1.0,0.0,2.0,2.0,1.0,0.0,4.0,0.0,0.0,4.0,10.0,0.0,17.0,0.0,16.0,0.0,2.0,3.0,11.0,1.0,5.0,14.0,1.0,37.0,13.0,0.0,0.0,0.0,1.0,4.0,6.0,0.0,49.0,2.0,0.0,6.0
+0.0,5.0,0.0,4.0,1.0,2.0,1.0,0.0,12.0,0.0,18.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,1.0,1.0,5.0,4.0,5.0,0.0,23.0,4.0,0.0,0.0,0.0,0.0,7.0,0.0,2.0,2.0,11.0,4.0,0.0,3.0,10.0,0.0,0.0,0.0,6.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,3.0,1.0,0.0,9.0,5.0,4.0,23.0,0.0,0.0,0.0,2.0,1.0,0.0,12.0,17.0,3.0,31.0,16.0,0.0,19.0,0.0,12.0,3.0,0.0,6.0,30.0,0.0,5.0,20.0,0.0,17.0,17.0,0.0,6.0,0.0,5.0,4.0,8.0,0.0,45.0,4.0,0.0,18.0
+3.0,7.0,0.0,3.0,1.0,16.0,1.0,5.0,12.0,0.0,5.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,5.0,0.0,16.0,7.0,0.0,4.0,1.0,4.0,0.0,17.0,0.0,2.0,0.0,10.0,0.0,9.0,1.0,2.0,0.0,7.0,12.0,0.0,3.0,13.0,0.0,1.0,2.0,5.0,1.0,0.0,6.0,0.0,3.0,4.0,0.0,4.0,4.0,1.0,0.0,0.0,8.0,4.0,16.0,11.0,1.0,0.0,2.0,4.0,2.0,5.0,12.0,7.0,4.0,15.0,33.0,0.0,34.0,0.0,14.0,10.0,1.0,1.0,40.0,25.0,15.0,40.0,2.0,23.0,28.0,7.0,0.0,0.0,3.0,4.0,16.0,0.0,88.0,4.0,0.0,19.0
+1.0,1.0,0.0,8.0,2.0,13.0,0.0,4.0,9.0,3.0,7.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,4.0,12.0,0.0,1.0,7.0,3.0,0.0,16.0,6.0,9.0,0.0,0.0,5.0,12.0,1.0,0.0,0.0,12.0,2.0,0.0,0.0,5.0,0.0,3.0,0.0,2.0,0.0,1.0,3.0,0.0,0.0,4.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,9.0,7.0,1.0,0.0,0.0,3.0,4.0,0.0,11.0,9.0,0.0,28.0,48.0,0.0,42.0,0.0,19.0,1.0,0.0,0.0,30.0,1.0,6.0,29.0,0.0,42.0,16.0,0.0,0.0,0.0,10.0,6.0,6.0,0.0,70.0,0.0,0.0,4.0
+2.0,0.0,0.0,3.0,0.0,2.0,2.0,3.0,0.0,0.0,10.0,3.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,6.0,9.0,0.0,2.0,6.0,4.0,0.0,6.0,5.0,1.0,1.0,2.0,0.0,3.0,4.0,1.0,2.0,3.0,1.0,0.0,0.0,3.0,0.0,3.0,0.0,0.0,1.0,0.0,4.0,0.0,4.0,3.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,7.0,6.0,8.0,0.0,0.0,0.0,2.0,1.0,0.0,8.0,4.0,2.0,9.0,18.0,0.0,9.0,0.0,15.0,6.0,0.0,0.0,27.0,0.0,0.0,3.0,0.0,10.0,31.0,2.0,0.0,0.0,11.0,6.0,17.0,0.0,34.0,1.0,0.0,4.0
+3.0,0.0,0.0,6.0,0.0,8.0,0.0,0.0,3.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,3.0,0.0,0.0,3.0,0.0,1.0,0.0,5.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,7.0,12.0,0.0,10.0,0.0,7.0,5.0,0.0,0.0,17.0,0.0,2.0,3.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,3.0
+0.0,2.0,0.0,4.0,0.0,3.0,0.0,0.0,5.0,1.0,7.0,0.0,0.0,1.0,0.0,1.0,4.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,4.0,1.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,11.0,5.0,0.0,16.0,0.0,3.0,0.0,0.0,0.0,18.0,0.0,4.0,14.0,0.0,12.0,3.0,0.0,0.0,0.0,0.0,4.0,3.0,0.0,2.0,0.0,0.0,3.0
+1.0,4.0,0.0,2.0,2.0,9.0,1.0,0.0,10.0,0.0,3.0,0.0,0.0,0.0,0.0,2.0,4.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,2.0,10.0,5.0,3.0,0.0,2.0,7.0,0.0,2.0,0.0,0.0,0.0,2.0,0.0,0.0,6.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,6.0,3.0,15.0,0.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,4.0,5.0,0.0,0.0,1.0,6.0,0.0,1.0,1.0,12.0,2.0,8.0,8.0,0.0,4.0,6.0,0.0,0.0,0.0,1.0,1.0,6.0,0.0,17.0,1.0,0.0,1.0
+3.0,9.0,0.0,14.0,2.0,6.0,1.0,2.0,2.0,2.0,7.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,15.0,0.0,1.0,1.0,6.0,8.0,0.0,1.0,5.0,1.0,1.0,0.0,0.0,2.0,1.0,0.0,0.0,12.0,0.0,0.0,2.0,5.0,0.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,3.0,1.0,0.0,0.0,2.0,0.0,0.0,8.0,0.0,0.0,17.0,10.0,0.0,2.0,0.0,11.0,5.0,0.0,0.0,18.0,0.0,6.0,10.0,0.0,19.0,4.0,0.0,1.0,1.0,3.0,10.0,14.0,0.0,18.0,1.0,0.0,4.0
+1.0,5.0,0.0,6.0,4.0,2.0,2.0,3.0,7.0,15.0,3.0,0.0,0.0,0.0,1.0,5.0,0.0,0.0,4.0,4.0,6.0,1.0,6.0,9.0,0.0,0.0,4.0,12.0,0.0,5.0,13.0,4.0,4.0,0.0,0.0,9.0,15.0,0.0,4.0,8.0,15.0,0.0,1.0,16.0,0.0,4.0,1.0,0.0,11.0,0.0,8.0,3.0,5.0,2.0,3.0,0.0,1.0,1.0,1.0,0.0,3.0,10.0,18.0,15.0,1.0,13.0,4.0,6.0,4.0,0.0,11.0,7.0,2.0,18.0,11.0,0.0,10.0,1.0,12.0,7.0,0.0,5.0,31.0,6.0,18.0,14.0,1.0,30.0,28.0,3.0,45.0,2.0,9.0,24.0,26.0,21.0,64.0,24.0,0.0,18.0
+6.0,11.0,0.0,20.0,16.0,23.0,3.0,2.0,10.0,0.0,10.0,0.0,0.0,2.0,3.0,7.0,4.0,0.0,2.0,0.0,7.0,3.0,21.0,5.0,7.0,8.0,6.0,8.0,0.0,7.0,9.0,4.0,6.0,6.0,2.0,4.0,5.0,0.0,1.0,17.0,16.0,3.0,8.0,17.0,0.0,22.0,0.0,4.0,8.0,4.0,20.0,0.0,4.0,11.0,4.0,0.0,4.0,4.0,0.0,0.0,6.0,12.0,18.0,17.0,1.0,0.0,1.0,22.0,6.0,1.0,12.0,11.0,3.0,42.0,34.0,0.0,19.0,0.0,26.0,21.0,0.0,7.0,32.0,11.0,26.0,61.0,1.0,28.0,34.0,1.0,25.0,0.0,18.0,42.0,46.0,0.0,84.0,27.0,1.0,36.0
+8.0,3.0,0.0,7.0,8.0,6.0,6.0,4.0,18.0,1.0,7.0,1.0,0.0,0.0,1.0,12.0,5.0,0.0,10.0,1.0,6.0,0.0,6.0,13.0,13.0,1.0,10.0,17.0,0.0,0.0,16.0,2.0,5.0,8.0,1.0,7.0,22.0,0.0,10.0,18.0,12.0,2.0,13.0,4.0,0.0,14.0,4.0,0.0,28.0,6.0,15.0,1.0,18.0,5.0,5.0,0.0,4.0,4.0,3.0,0.0,1.0,12.0,11.0,18.0,20.0,0.0,8.0,12.0,7.0,0.0,14.0,23.0,11.0,11.0,27.0,1.0,20.0,10.0,15.0,13.0,3.0,7.0,15.0,27.0,12.0,23.0,20.0,25.0,45.0,4.0,36.0,0.0,87.0,48.0,39.0,0.0,41.0,88.0,0.0,110.0
+2.0,3.0,0.0,8.0,8.0,7.0,3.0,1.0,16.0,6.0,8.0,7.0,0.0,3.0,0.0,5.0,0.0,0.0,0.0,1.0,7.0,2.0,2.0,5.0,5.0,3.0,3.0,4.0,0.0,2.0,5.0,2.0,6.0,0.0,2.0,4.0,9.0,0.0,2.0,6.0,13.0,0.0,2.0,10.0,0.0,8.0,0.0,0.0,2.0,2.0,10.0,2.0,4.0,15.0,1.0,0.0,1.0,5.0,0.0,0.0,2.0,1.0,11.0,11.0,5.0,0.0,0.0,1.0,1.0,0.0,5.0,7.0,2.0,9.0,20.0,0.0,6.0,1.0,17.0,3.0,0.0,0.0,44.0,7.0,15.0,10.0,5.0,27.0,42.0,1.0,16.0,0.0,27.0,13.0,11.0,0.0,45.0,35.0,0.0,8.0
+1.0,18.0,0.0,13.0,22.0,22.0,3.0,7.0,22.0,15.0,11.0,2.0,0.0,3.0,1.0,4.0,5.0,0.0,0.0,0.0,3.0,1.0,16.0,12.0,6.0,11.0,29.0,9.0,1.0,3.0,5.0,6.0,2.0,0.0,1.0,12.0,4.0,0.0,4.0,27.0,15.0,0.0,7.0,17.0,0.0,11.0,3.0,0.0,5.0,9.0,3.0,1.0,5.0,4.0,1.0,0.0,4.0,1.0,2.0,0.0,5.0,22.0,29.0,34.0,3.0,0.0,0.0,10.0,3.0,0.0,12.0,14.0,7.0,25.0,19.0,0.0,10.0,1.0,33.0,9.0,1.0,1.0,28.0,3.0,6.0,28.0,5.0,17.0,52.0,2.0,4.0,0.0,23.0,47.0,33.0,0.0,70.0,19.0,1.0,17.0
+5.0,11.0,0.0,12.0,19.0,8.0,5.0,6.0,8.0,6.0,7.0,1.0,0.0,6.0,2.0,0.0,4.0,0.0,3.0,1.0,11.0,1.0,16.0,9.0,3.0,16.0,16.0,14.0,0.0,5.0,11.0,15.0,2.0,0.0,7.0,30.0,31.0,7.0,1.0,17.0,36.0,0.0,2.0,25.0,1.0,18.0,0.0,12.0,10.0,5.0,17.0,6.0,4.0,7.0,2.0,0.0,6.0,7.0,2.0,0.0,6.0,13.0,40.0,25.0,9.0,0.0,5.0,10.0,8.0,0.0,13.0,14.0,7.0,19.0,25.0,0.0,9.0,1.0,22.0,10.0,1.0,1.0,63.0,16.0,23.0,21.0,22.0,46.0,89.0,5.0,18.0,0.0,22.0,45.0,61.0,0.0,125.0,40.0,0.0,30.0
+8.0,8.0,0.0,22.0,15.0,21.0,14.0,3.0,12.0,8.0,13.0,1.0,0.0,0.0,0.0,9.0,8.0,0.0,7.0,5.0,9.0,0.0,14.0,26.0,0.0,27.0,9.0,21.0,0.0,9.0,19.0,9.0,12.0,0.0,4.0,18.0,18.0,5.0,7.0,20.0,23.0,1.0,8.0,27.0,0.0,23.0,0.0,26.0,40.0,9.0,22.0,0.0,7.0,8.0,6.0,3.0,1.0,11.0,0.0,0.0,14.0,14.0,41.0,34.0,19.0,0.0,2.0,10.0,14.0,0.0,12.0,34.0,12.0,10.0,21.0,0.0,14.0,0.0,26.0,21.0,4.0,10.0,144.0,21.0,46.0,16.0,4.0,45.0,100.0,0.0,68.0,0.0,31.0,43.0,45.0,0.0,152.0,35.0,0.0,57.0
+0.0,3.0,0.0,6.0,3.0,8.0,1.0,2.0,3.0,1.0,4.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,3.0,9.0,0.0,2.0,6.0,4.0,1.0,0.0,1.0,1.0,3.0,1.0,0.0,5.0,10.0,1.0,0.0,0.0,7.0,10.0,0.0,1.0,5.0,0.0,0.0,1.0,0.0,4.0,5.0,2.0,0.0,1.0,2.0,9.0,0.0,1.0,2.0,1.0,0.0,2.0,2.0,12.0,22.0,5.0,0.0,0.0,7.0,1.0,0.0,2.0,5.0,1.0,10.0,6.0,0.0,1.0,0.0,9.0,4.0,1.0,3.0,6.0,1.0,6.0,7.0,4.0,10.0,29.0,0.0,3.0,0.0,8.0,14.0,17.0,0.0,40.0,20.0,0.0,14.0
+5.0,4.0,0.0,8.0,9.0,8.0,1.0,3.0,15.0,15.0,5.0,5.0,0.0,1.0,2.0,5.0,9.0,0.0,8.0,3.0,3.0,0.0,6.0,6.0,10.0,7.0,5.0,7.0,0.0,2.0,7.0,3.0,15.0,0.0,3.0,9.0,10.0,0.0,1.0,11.0,8.0,0.0,8.0,7.0,0.0,12.0,10.0,0.0,12.0,0.0,4.0,3.0,6.0,19.0,9.0,0.0,4.0,6.0,2.0,0.0,5.0,12.0,20.0,14.0,4.0,0.0,3.0,3.0,1.0,0.0,4.0,3.0,5.0,10.0,15.0,1.0,5.0,2.0,12.0,11.0,3.0,12.0,13.0,14.0,17.0,14.0,8.0,24.0,57.0,13.0,18.0,0.0,39.0,48.0,35.0,0.0,79.0,83.0,0.0,28.0
+1.0,9.0,0.0,15.0,1.0,7.0,4.0,1.0,12.0,1.0,3.0,5.0,0.0,1.0,2.0,5.0,8.0,0.0,6.0,2.0,2.0,4.0,10.0,1.0,6.0,3.0,8.0,2.0,0.0,5.0,8.0,0.0,11.0,0.0,5.0,1.0,0.0,0.0,4.0,15.0,0.0,3.0,7.0,2.0,2.0,0.0,0.0,0.0,23.0,2.0,1.0,0.0,9.0,28.0,28.0,0.0,8.0,1.0,2.0,0.0,7.0,4.0,11.0,2.0,9.0,0.0,1.0,12.0,3.0,1.0,3.0,2.0,5.0,12.0,23.0,0.0,7.0,0.0,8.0,5.0,3.0,4.0,52.0,9.0,27.0,18.0,15.0,11.0,15.0,6.0,4.0,12.0,49.0,12.0,29.0,0.0,26.0,39.0,98.0,24.0
+1.0,3.0,0.0,2.0,1.0,8.0,3.0,0.0,11.0,0.0,2.0,4.0,0.0,0.0,0.0,4.0,2.0,0.0,2.0,4.0,0.0,3.0,5.0,10.0,4.0,3.0,7.0,5.0,0.0,1.0,8.0,1.0,8.0,0.0,10.0,3.0,2.0,0.0,5.0,9.0,1.0,2.0,2.0,0.0,0.0,3.0,11.0,0.0,17.0,0.0,3.0,0.0,9.0,5.0,23.0,0.0,6.0,0.0,0.0,0.0,4.0,5.0,6.0,9.0,2.0,0.0,4.0,2.0,2.0,4.0,4.0,1.0,1.0,4.0,4.0,0.0,6.0,0.0,12.0,13.0,1.0,0.0,109.0,6.0,10.0,14.0,17.0,13.0,5.0,2.0,13.0,29.0,12.0,16.0,26.0,0.0,15.0,31.0,120.0,29.0
+2.0,7.0,0.0,1.0,1.0,5.0,1.0,7.0,6.0,9.0,3.0,1.0,0.0,2.0,4.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,3.0,6.0,6.0,1.0,11.0,6.0,0.0,3.0,15.0,3.0,0.0,0.0,1.0,9.0,7.0,0.0,2.0,9.0,10.0,1.0,8.0,14.0,0.0,8.0,1.0,4.0,9.0,8.0,10.0,0.0,7.0,12.0,1.0,0.0,4.0,3.0,0.0,0.0,11.0,11.0,12.0,22.0,4.0,0.0,12.0,15.0,6.0,0.0,3.0,9.0,2.0,19.0,4.0,0.0,4.0,0.0,17.0,17.0,0.0,5.0,22.0,5.0,21.0,15.0,1.0,13.0,48.0,5.0,18.0,11.0,49.0,32.0,49.0,0.0,60.0,17.0,0.0,22.0
+5.0,5.0,0.0,16.0,6.0,25.0,6.0,12.0,16.0,10.0,5.0,0.0,0.0,2.0,1.0,3.0,0.0,0.0,0.0,3.0,4.0,24.0,19.0,13.0,5.0,24.0,13.0,13.0,0.0,4.0,23.0,5.0,3.0,0.0,0.0,6.0,18.0,2.0,3.0,24.0,21.0,1.0,3.0,27.0,0.0,16.0,0.0,10.0,21.0,3.0,14.0,0.0,5.0,11.0,6.0,4.0,2.0,1.0,0.0,0.0,8.0,18.0,24.0,50.0,6.0,0.0,3.0,9.0,8.0,0.0,6.0,15.0,15.0,14.0,11.0,0.0,9.0,0.0,19.0,13.0,0.0,6.0,44.0,20.0,20.0,13.0,8.0,41.0,66.0,4.0,11.0,0.0,36.0,46.0,60.0,0.0,142.0,21.0,0.0,50.0
+0.0,5.0,0.0,9.0,1.0,15.0,6.0,4.0,16.0,3.0,5.0,1.0,1.0,0.0,7.0,4.0,2.0,0.0,6.0,3.0,0.0,3.0,19.0,1.0,6.0,0.0,20.0,19.0,0.0,4.0,1.0,13.0,2.0,2.0,3.0,27.0,1.0,0.0,3.0,17.0,26.0,1.0,8.0,26.0,0.0,7.0,0.0,0.0,11.0,6.0,10.0,0.0,5.0,2.0,1.0,0.0,2.0,2.0,5.0,0.0,0.0,7.0,28.0,13.0,23.0,0.0,0.0,13.0,14.0,0.0,19.0,15.0,4.0,28.0,28.0,0.0,20.0,0.0,15.0,43.0,1.0,0.0,15.0,2.0,48.0,38.0,6.0,78.0,99.0,1.0,19.0,0.0,83.0,62.0,68.0,1.0,215.0,2.0,1.0,2.0
+3.0,5.0,0.0,1.0,0.0,5.0,1.0,1.0,10.0,0.0,3.0,0.0,0.0,0.0,0.0,6.0,0.0,2.0,0.0,2.0,2.0,0.0,1.0,0.0,3.0,1.0,4.0,2.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,3.0,0.0,7.0,3.0,0.0,1.0,4.0,0.0,0.0,0.0,5.0,0.0,3.0,0.0,1.0,3.0,0.0,4.0,2.0,1.0,0.0,1.0,0.0,6.0,4.0,3.0,21.0,2.0,9.0,2.0,6.0,0.0,2.0,4.0,0.0,2.0,6.0,5.0,2.0,4.0,0.0,12.0,1.0,0.0,4.0,1.0,1.0,4.0,7.0,4.0,17.0,5.0,1.0,0.0,0.0,7.0,23.0,13.0,2.0,7.0,17.0,2.0,130.0
+6.0,3.0,0.0,1.0,3.0,10.0,4.0,2.0,4.0,7.0,3.0,4.0,0.0,0.0,0.0,8.0,1.0,0.0,0.0,2.0,0.0,0.0,8.0,2.0,3.0,7.0,3.0,7.0,0.0,1.0,7.0,0.0,12.0,0.0,12.0,0.0,1.0,1.0,15.0,16.0,0.0,0.0,3.0,0.0,0.0,2.0,1.0,0.0,17.0,1.0,7.0,0.0,6.0,16.0,32.0,0.0,2.0,3.0,3.0,0.0,1.0,15.0,3.0,31.0,9.0,0.0,8.0,2.0,3.0,1.0,12.0,1.0,9.0,11.0,8.0,2.0,6.0,1.0,17.0,7.0,1.0,6.0,29.0,11.0,13.0,14.0,45.0,17.0,3.0,0.0,3.0,0.0,33.0,9.0,18.0,0.0,8.0,160.0,118.0,17.0
+10.0,19.0,0.0,14.0,3.0,20.0,5.0,2.0,25.0,17.0,1.0,4.0,0.0,0.0,0.0,5.0,5.0,0.0,1.0,2.0,29.0,1.0,21.0,1.0,12.0,21.0,13.0,12.0,0.0,9.0,12.0,1.0,10.0,1.0,12.0,4.0,3.0,0.0,11.0,25.0,4.0,2.0,6.0,5.0,0.0,26.0,0.0,0.0,35.0,4.0,19.0,0.0,16.0,13.0,21.0,2.0,7.0,9.0,3.0,0.0,15.0,9.0,9.0,54.0,10.0,0.0,3.0,17.0,16.0,0.0,18.0,9.0,14.0,30.0,33.0,0.0,27.0,2.0,48.0,24.0,0.0,4.0,30.0,38.0,34.0,42.0,19.0,59.0,17.0,2.0,130.0,0.0,46.0,33.0,83.0,0.0,34.0,30.0,1.0,41.0
+7.0,4.0,0.0,8.0,1.0,5.0,2.0,2.0,4.0,9.0,6.0,6.0,0.0,0.0,2.0,3.0,1.0,0.0,0.0,1.0,1.0,0.0,3.0,0.0,6.0,1.0,8.0,3.0,0.0,6.0,6.0,0.0,14.0,0.0,5.0,0.0,12.0,0.0,9.0,8.0,1.0,0.0,2.0,3.0,0.0,5.0,0.0,0.0,29.0,0.0,4.0,0.0,16.0,9.0,17.0,0.0,2.0,1.0,0.0,0.0,8.0,6.0,3.0,16.0,3.0,0.0,1.0,11.0,0.0,0.0,11.0,2.0,2.0,17.0,12.0,1.0,7.0,0.0,17.0,14.0,1.0,0.0,63.0,7.0,17.0,19.0,25.0,28.0,5.0,0.0,9.0,3.0,15.0,1.0,22.0,0.0,12.0,42.0,0.0,19.0
+1.0,3.0,0.0,2.0,0.0,2.0,1.0,0.0,3.0,2.0,2.0,0.0,0.0,0.0,0.0,3.0,1.0,0.0,0.0,0.0,1.0,0.0,3.0,0.0,2.0,2.0,5.0,2.0,0.0,2.0,1.0,0.0,4.0,0.0,1.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,7.0,0.0,0.0,18.0,0.0,0.0,0.0,8.0,4.0,4.0,0.0,6.0,0.0,0.0,0.0,6.0,3.0,2.0,13.0,4.0,0.0,2.0,1.0,2.0,0.0,5.0,1.0,0.0,6.0,7.0,0.0,8.0,0.0,11.0,5.0,0.0,3.0,39.0,9.0,7.0,6.0,13.0,18.0,5.0,1.0,6.0,3.0,12.0,0.0,5.0,0.0,5.0,24.0,0.0,11.0
+0.0,3.0,0.0,13.0,2.0,7.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,0.0,4.0,3.0,0.0,0.0,3.0,6.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,6.0,3.0,0.0,4.0,0.0,7.0,0.0,0.0,0.0,4.0,0.0,0.0,7.0,0.0,11.0,2.0,0.0,0.0,0.0,0.0,4.0,1.0,0.0,32.0,0.0,0.0,0.0
+0.0,5.0,0.0,13.0,9.0,10.0,0.0,0.0,20.0,0.0,3.0,2.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,1.0,7.0,11.0,0.0,0.0,5.0,0.0,2.0,0.0,0.0,0.0,3.0,9.0,0.0,0.0,6.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,4.0,2.0,0.0,17.0,0.0,0.0,14.0,26.0,0.0,31.0,0.0,42.0,14.0,0.0,0.0,22.0,0.0,1.0,55.0,0.0,52.0,1.0,0.0,0.0,0.0,6.0,0.0,1.0,0.0,24.0,4.0,0.0,0.0
+7.0,7.0,0.0,0.0,2.0,14.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,12.0,0.0,5.0,21.0,13.0,0.0,7.0,0.0,4.0,0.0,0.0,0.0,10.0,6.0,0.0,0.0,4.0,7.0,0.0,0.0,7.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,10.0,6.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,18.0,1.0,14.0,10.0,0.0,19.0,0.0,9.0,0.0,0.0,0.0,17.0,0.0,0.0,13.0,7.0,11.0,30.0,0.0,5.0,0.0,3.0,34.0,0.0,0.0,17.0,0.0,0.0,0.0
+8.0,25.0,0.0,59.0,35.0,70.0,9.0,12.0,40.0,3.0,57.0,1.0,0.0,2.0,0.0,11.0,0.0,0.0,0.0,8.0,2.0,41.0,77.0,68.0,11.0,81.0,94.0,81.0,0.0,35.0,21.0,34.0,0.0,0.0,0.0,86.0,32.0,10.0,0.0,58.0,51.0,2.0,13.0,43.0,0.0,23.0,9.0,10.0,9.0,4.0,35.0,0.0,8.0,2.0,8.0,0.0,0.0,4.0,0.0,0.0,3.0,14.0,52.0,71.0,14.0,4.0,1.0,29.0,23.0,0.0,73.0,16.0,0.0,71.0,56.0,0.0,102.0,0.0,110.0,18.0,5.0,2.0,78.0,3.0,30.0,89.0,9.0,168.0,82.0,3.0,7.0,0.0,57.0,94.0,115.0,12.0,345.0,9.0,0.0,30.0
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,9.0,0.0,0.0,1.0,0.0,0.0,1.0,5.0,0.0,0.0,0.0,0.0,2.0,0.0,12.0,0.0,0.0,0.0
+0.0,2.0,0.0,4.0,1.0,1.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,7.0,8.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,7.0,0.0,1.0,5.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,5.0,2.0,0.0,0.0
+2.0,0.0,0.0,4.0,1.0,2.0,1.0,0.0,6.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,3.0,1.0,0.0,3.0,5.0,0.0,4.0,0.0,0.0,2.0,0.0,2.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,3.0,1.0,0.0,0.0,4.0,1.0,0.0,1.0,0.0,0.0,2.0,4.0,2.0,0.0,0.0,7.0,0.0,0.0,0.0,3.0,0.0,2.0,6.0,0.0,10.0,1.0,0.0,0.0,0.0,1.0,0.0,5.0,0.0,7.0,0.0,0.0,0.0
+0.0,1.0,0.0,3.0,1.0,1.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,3.0,1.0,1.0,1.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0
+1.0,0.0,0.0,5.0,0.0,0.0,2.0,0.0,5.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,6.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,10.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,6.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,4.0,0.0,3.0,6.0,0.0,9.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,6.0,0.0,0.0,0.0
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,11.0,5.0,0.0,9.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,5.0,3.0,0.0,2.0,2.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0,6.0,0.0,0.0,0.0
+0.0,3.0,0.0,0.0,0.0,6.0,0.0,0.0,10.0,3.0,3.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,4.0,14.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,2.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,10.0,0.0,1.0,3.0,1.0,7.0,0.0,0.0,0.0,0.0,3.0,4.0,6.0,0.0,7.0,0.0,0.0,0.0
+0.0,4.0,0.0,1.0,0.0,1.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,4.0,8.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,16.0,1.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,10.0,0.0,0.0,3.0,0.0,5.0,3.0,0.0,0.0,0.0,6.0,5.0,2.0,0.0,3.0,0.0,0.0,2.0
+0.0,2.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,0.0,0.0,3.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,6.0,0.0,0.0,0.0
+1.0,8.0,0.0,2.0,1.0,12.0,0.0,0.0,8.0,8.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,0.0,11.0,0.0,1.0,2.0,3.0,0.0,0.0,0.0,4.0,4.0,4.0,0.0,5.0,4.0,0.0,0.0,9.0,0.0,1.0,0.0,5.0,2.0,1.0,1.0,0.0,2.0,4.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,6.0,4.0,16.0,0.0,0.0,0.0,2.0,1.0,0.0,2.0,5.0,0.0,11.0,3.0,0.0,9.0,0.0,0.0,1.0,0.0,1.0,29.0,2.0,5.0,9.0,0.0,8.0,15.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,42.0,2.0,0.0,9.0
+0.0,6.0,0.0,1.0,0.0,2.0,1.0,0.0,7.0,0.0,3.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,6.0,0.0,3.0,3.0,7.0,0.0,0.0,0.0,1.0,0.0,7.0,0.0,1.0,5.0,0.0,0.0,4.0,0.0,0.0,0.0,3.0,0.0,3.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,1.0,0.0,0.0,1.0,0.0,2.0,4.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,14.0,3.0,6.0,5.0,0.0,7.0,0.0,7.0,3.0,0.0,1.0,7.0,0.0,15.0,4.0,0.0,9.0,16.0,1.0,1.0,0.0,1.0,3.0,4.0,0.0,37.0,1.0,0.0,10.0
+0.0,5.0,0.0,1.0,1.0,5.0,0.0,1.0,5.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,5.0,4.0,0.0,0.0,13.0,3.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,0.0,0.0,7.0,1.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,2.0,5.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,1.0,6.0,8.0,6.0,0.0,0.0,0.0,2.0,0.0,1.0,2.0,0.0,9.0,8.0,0.0,9.0,0.0,1.0,1.0,0.0,0.0,8.0,0.0,5.0,13.0,0.0,13.0,7.0,3.0,0.0,0.0,4.0,2.0,7.0,0.0,34.0,4.0,0.0,2.0
+0.0,0.0,0.0,5.0,0.0,5.0,1.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,2.0,2.0,4.0,0.0,0.0,3.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,2.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,3.0,0.0,0.0,2.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,3.0,20.0,3.0,0.0,3.0
+0.0,2.0,0.0,0.0,1.0,5.0,4.0,0.0,3.0,0.0,4.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,4.0,1.0,4.0,4.0,3.0,0.0,1.0,2.0,3.0,3.0,0.0,0.0,1.0,0.0,4.0,0.0,9.0,6.0,1.0,0.0,4.0,0.0,0.0,0.0,6.0,0.0,5.0,0.0,0.0,3.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,4.0,12.0,5.0,0.0,0.0,1.0,3.0,0.0,2.0,4.0,0.0,15.0,3.0,0.0,12.0,2.0,7.0,1.0,0.0,0.0,14.0,1.0,7.0,6.0,0.0,18.0,17.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,46.0,1.0,0.0,1.0
+0.0,1.0,0.0,1.0,2.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,2.0,3.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,7.0,8.0,0.0,3.0,11.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,2.0,7.0,0.0,13.0,13.0,0.0,0.0,0.0,3.0,1.0,1.0,0.0,21.0,0.0,0.0,1.0
+0.0,7.0,0.0,2.0,8.0,1.0,0.0,0.0,5.0,0.0,5.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,2.0,7.0,0.0,2.0,10.0,4.0,0.0,4.0,3.0,3.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,3.0,0.0,1.0,3.0,0.0,1.0,0.0,0.0,8.0,0.0,1.0,2.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,3.0,1.0,9.0,1.0,0.0,0.0,3.0,0.0,0.0,5.0,0.0,0.0,2.0,10.0,0.0,6.0,0.0,4.0,0.0,0.0,0.0,23.0,0.0,6.0,9.0,0.0,10.0,5.0,0.0,2.0,0.0,3.0,2.0,8.0,0.0,17.0,3.0,0.0,0.0
+0.0,2.0,0.0,6.0,1.0,9.0,0.0,0.0,7.0,0.0,9.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,3.0,0.0,3.0,1.0,0.0,0.0,1.0,3.0,2.0,5.0,0.0,0.0,3.0,0.0,1.0,0.0,3.0,5.0,0.0,1.0,4.0,0.0,1.0,0.0,3.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,5.0,2.0,1.0,9.0,2.0,7.0,0.0,4.0,2.0,0.0,5.0,1.0,0.0,2.0,2.0,0.0,0.0,0.0,6.0,4.0,0.0,0.0,20.0,0.0,5.0,4.0,2.0,6.0,16.0,0.0,0.0,0.0,4.0,3.0,12.0,2.0,23.0,0.0,0.0,1.0
+0.0,10.0,0.0,5.0,0.0,1.0,1.0,1.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,5.0,1.0,0.0,6.0,14.0,0.0,15.0,0.0,5.0,0.0,19.0,4.0,1.0,4.0,7.0,0.0,1.0,13.0,6.0,1.0,4.0,9.0,2.0,1.0,6.0,0.0,21.0,0.0,9.0,0.0,2.0,10.0,0.0,0.0,1.0,1.0,0.0,6.0,6.0,0.0,0.0,2.0,6.0,0.0,14.0,2.0,6.0,9.0,0.0,2.0,0.0,15.0,1.0,1.0,15.0,13.0,0.0,6.0,2.0,23.0,3.0,0.0,1.0,20.0,9.0,9.0,15.0,1.0,16.0,21.0,2.0,0.0,0.0,11.0,26.0,14.0,14.0,86.0,16.0,0.0,19.0
+2.0,2.0,0.0,1.0,2.0,2.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,4.0,2.0,2.0,4.0,3.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,9.0,4.0,2.0,2.0,3.0,0.0,0.0,0.0,4.0,0.0,4.0,0.0,0.0,1.0,0.0,0.0,0.0,4.0,1.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,1.0,3.0,0.0,5.0,0.0,0.0,4.0,9.0,0.0,3.0,1.0,14.0,8.0,2.0,3.0,0.0,2.0,1.0,13.0,0.0,6.0,8.0,4.0,0.0,0.0,6.0,8.0,7.0,0.0,13.0,2.0,0.0,4.0
+0.0,5.0,0.0,5.0,2.0,4.0,2.0,0.0,6.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,4.0,1.0,3.0,1.0,3.0,4.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,2.0,7.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,7.0,1.0,10.0,5.0,0.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,2.0,6.0,0.0,3.0,0.0,4.0,3.0,0.0,0.0,3.0,1.0,13.0,4.0,0.0,11.0,3.0,0.0,0.0,0.0,7.0,12.0,7.0,0.0,5.0,4.0,0.0,1.0
+5.0,3.0,0.0,1.0,0.0,0.0,1.0,1.0,3.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,4.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,7.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,2.0,1.0,1.0,0.0,1.0,0.0,4.0,3.0,3.0,1.0,0.0,0.0,2.0,0.0,2.0,1.0,4.0,4.0,2.0,0.0,0.0,0.0,3.0,0.0,2.0,1.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,6.0,1.0,1.0,3.0,2.0,10.0,0.0,0.0,0.0,4.0,10.0,0.0,0.0,10.0,13.0,19.0,7.0,6.0,4.0,0.0,12.0
+0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,4.0,0.0,4.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,6.0,4.0,2.0,0.0,3.0,0.0,3.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,5.0,9.0,1.0,0.0,2.0,0.0,0.0,0.0
+0.0,1.0,0.0,0.0,0.0,4.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,1.0,1.0,0.0,6.0,0.0,0.0,0.0,5.0,0.0,1.0,2.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,3.0,0.0,3.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,6.0,0.0,9.0,3.0,0.0,0.0,2.0,0.0,3.0,5.0,0.0,7.0,4.0,0.0,0.0,0.0,4.0,2.0,6.0,0.0,4.0,0.0,0.0,2.0
+0.0,1.0,0.0,0.0,1.0,0.0,4.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,4.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,2.0,1.0,0.0,3.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,3.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,2.0,14.0,0.0,6.0,0.0,0.0,0.0
+4.0,7.0,0.0,2.0,10.0,6.0,2.0,0.0,12.0,7.0,9.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,4.0,1.0,4.0,0.0,13.0,4.0,2.0,7.0,10.0,13.0,0.0,8.0,0.0,2.0,2.0,4.0,0.0,2.0,0.0,0.0,0.0,9.0,3.0,0.0,3.0,4.0,0.0,7.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,3.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,1.0,9.0,2.0,0.0,0.0,0.0,1.0,0.0,4.0,0.0,0.0,7.0,12.0,0.0,13.0,2.0,12.0,6.0,0.0,0.0,19.0,1.0,17.0,24.0,1.0,8.0,10.0,0.0,1.0,0.0,6.0,8.0,34.0,0.0,39.0,1.0,0.0,0.0
+1.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,5.0,0.0,1.0,0.0,0.0,2.0,3.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,1.0,4.0,1.0,0.0,3.0,3.0,6.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,3.0,1.0,0.0,4.0,0.0,0.0,3.0,3.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,2.0,0.0,2.0,12.0,3.0,4.0,2.0,12.0,4.0,0.0,0.0,9.0,4.0,14.0,5.0,12.0,8.0,2.0,35.0
+3.0,18.0,0.0,11.0,7.0,17.0,3.0,0.0,24.0,13.0,7.0,2.0,0.0,0.0,1.0,0.0,2.0,0.0,2.0,5.0,2.0,2.0,17.0,4.0,3.0,11.0,21.0,18.0,0.0,12.0,7.0,1.0,1.0,23.0,2.0,2.0,4.0,0.0,1.0,17.0,6.0,0.0,4.0,3.0,0.0,9.0,0.0,0.0,5.0,0.0,21.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,13.0,3.0,28.0,1.0,0.0,0.0,1.0,3.0,0.0,17.0,0.0,1.0,24.0,31.0,2.0,38.0,0.0,53.0,8.0,0.0,0.0,10.0,0.0,31.0,54.0,4.0,39.0,11.0,0.0,1.0,0.0,20.0,29.0,60.0,0.0,31.0,6.0,0.0,5.0
+11.0,11.0,0.0,11.0,6.0,13.0,4.0,6.0,9.0,7.0,16.0,5.0,1.0,1.0,0.0,5.0,2.0,0.0,14.0,0.0,2.0,24.0,8.0,33.0,12.0,7.0,11.0,17.0,0.0,16.0,26.0,8.0,12.0,2.0,11.0,5.0,15.0,0.0,31.0,6.0,16.0,0.0,14.0,10.0,3.0,18.0,4.0,0.0,18.0,6.0,13.0,7.0,31.0,7.0,12.0,1.0,6.0,0.0,8.0,0.0,12.0,23.0,20.0,38.0,21.0,0.0,1.0,14.0,11.0,0.0,22.0,10.0,11.0,41.0,34.0,0.0,44.0,1.0,56.0,25.0,5.0,9.0,21.0,24.0,67.0,43.0,24.0,62.0,40.0,3.0,37.0,10.0,43.0,38.0,86.0,0.0,76.0,40.0,1.0,12.0
+3.0,4.0,0.0,5.0,9.0,8.0,10.0,4.0,4.0,11.0,9.0,3.0,0.0,0.0,3.0,15.0,0.0,0.0,24.0,3.0,5.0,10.0,9.0,8.0,27.0,5.0,8.0,7.0,0.0,2.0,15.0,0.0,14.0,1.0,15.0,0.0,42.0,0.0,38.0,13.0,4.0,0.0,15.0,1.0,9.0,28.0,0.0,0.0,32.0,4.0,30.0,15.0,27.0,23.0,23.0,0.0,10.0,0.0,11.0,0.0,8.0,9.0,9.0,9.0,9.0,0.0,10.0,13.0,5.0,0.0,8.0,7.0,11.0,14.0,13.0,1.0,35.0,0.0,39.0,15.0,1.0,20.0,21.0,28.0,51.0,25.0,29.0,27.0,15.0,21.0,33.0,8.0,64.0,21.0,63.0,0.0,31.0,86.0,0.0,46.0
+16.0,15.0,0.0,7.0,1.0,11.0,6.0,3.0,17.0,11.0,8.0,1.0,0.0,3.0,1.0,10.0,2.0,0.0,10.0,7.0,7.0,0.0,22.0,35.0,15.0,0.0,17.0,22.0,0.0,3.0,17.0,7.0,16.0,10.0,18.0,13.0,43.0,0.0,22.0,10.0,26.0,0.0,11.0,28.0,0.0,29.0,10.0,0.0,6.0,16.0,17.0,12.0,18.0,4.0,19.0,0.0,10.0,3.0,5.0,0.0,13.0,18.0,33.0,21.0,11.0,0.0,5.0,19.0,3.0,2.0,11.0,27.0,18.0,16.0,27.0,1.0,21.0,0.0,34.0,14.0,6.0,13.0,22.0,18.0,35.0,29.0,17.0,39.0,97.0,2.0,5.0,0.0,54.0,71.0,52.0,0.0,165.0,36.0,2.0,14.0
+4.0,11.0,0.0,7.0,8.0,8.0,1.0,1.0,1.0,8.0,6.0,0.0,0.0,0.0,1.0,8.0,0.0,0.0,0.0,0.0,9.0,2.0,14.0,8.0,3.0,11.0,7.0,9.0,0.0,10.0,8.0,0.0,1.0,95.0,2.0,3.0,30.0,0.0,3.0,14.0,2.0,0.0,2.0,1.0,3.0,44.0,0.0,0.0,12.0,0.0,33.0,0.0,4.0,12.0,8.0,5.0,6.0,7.0,0.0,0.0,5.0,9.0,6.0,20.0,4.0,0.0,0.0,13.0,8.0,0.0,13.0,5.0,7.0,11.0,14.0,0.0,18.0,6.0,32.0,18.0,1.0,8.0,35.0,12.0,48.0,27.0,1.0,26.0,12.0,9.0,48.0,0.0,9.0,21.0,61.0,0.0,32.0,54.0,0.0,34.0
+7.0,3.0,0.0,7.0,10.0,4.0,0.0,6.0,10.0,40.0,3.0,1.0,0.0,3.0,1.0,8.0,0.0,0.0,1.0,3.0,10.0,0.0,5.0,3.0,2.0,5.0,3.0,4.0,0.0,10.0,12.0,1.0,6.0,68.0,2.0,3.0,32.0,0.0,3.0,9.0,9.0,0.0,5.0,4.0,5.0,23.0,6.0,0.0,21.0,2.0,35.0,3.0,7.0,20.0,5.0,1.0,5.0,3.0,0.0,0.0,13.0,7.0,13.0,24.0,3.0,0.0,12.0,7.0,5.0,0.0,18.0,3.0,8.0,12.0,17.0,2.0,22.0,2.0,33.0,22.0,2.0,12.0,78.0,10.0,31.0,15.0,3.0,19.0,13.0,7.0,21.0,0.0,17.0,5.0,63.0,1.0,44.0,42.0,0.0,40.0
+1.0,1.0,0.0,2.0,1.0,0.0,1.0,0.0,1.0,5.0,1.0,0.0,2.0,0.0,0.0,5.0,0.0,0.0,3.0,1.0,1.0,41.0,0.0,0.0,4.0,1.0,1.0,2.0,0.0,10.0,3.0,0.0,3.0,42.0,7.0,2.0,4.0,0.0,18.0,1.0,1.0,0.0,0.0,0.0,3.0,17.0,0.0,0.0,3.0,6.0,8.0,6.0,22.0,7.0,3.0,1.0,2.0,0.0,1.0,0.0,7.0,1.0,1.0,4.0,5.0,0.0,2.0,22.0,2.0,0.0,7.0,2.0,1.0,6.0,6.0,0.0,19.0,2.0,35.0,4.0,7.0,2.0,29.0,1.0,10.0,18.0,0.0,7.0,1.0,1.0,5.0,0.0,9.0,20.0,38.0,0.0,15.0,13.0,0.0,16.0
+9.0,3.0,0.0,1.0,6.0,5.0,4.0,4.0,8.0,23.0,4.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,1.0,3.0,0.0,6.0,16.0,4.0,9.0,3.0,3.0,0.0,7.0,3.0,4.0,5.0,26.0,7.0,1.0,30.0,0.0,4.0,7.0,3.0,0.0,2.0,0.0,64.0,32.0,0.0,0.0,21.0,4.0,25.0,2.0,11.0,12.0,7.0,4.0,7.0,7.0,0.0,0.0,4.0,1.0,5.0,7.0,3.0,3.0,0.0,12.0,12.0,2.0,9.0,9.0,9.0,10.0,17.0,1.0,20.0,8.0,18.0,26.0,2.0,11.0,21.0,9.0,52.0,21.0,12.0,27.0,15.0,23.0,16.0,6.0,18.0,39.0,68.0,4.0,32.0,34.0,1.0,49.0
+5.0,0.0,0.0,1.0,2.0,4.0,0.0,2.0,0.0,2.0,4.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,2.0,3.0,0.0,2.0,3.0,0.0,0.0,0.0,6.0,2.0,0.0,2.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,6.0,0.0,2.0,1.0,4.0,0.0,0.0,0.0,0.0,1.0,3.0,0.0,3.0,12.0,8.0,0.0,1.0,1.0,4.0,0.0,14.0,2.0,0.0,19.0,12.0,0.0,7.0,1.0,15.0,1.0,0.0,1.0,2.0,4.0,8.0,30.0,2.0,26.0,0.0,0.0,0.0,0.0,6.0,8.0,9.0,0.0,2.0,9.0,0.0,5.0
+2.0,10.0,0.0,10.0,5.0,14.0,1.0,0.0,5.0,14.0,3.0,3.0,0.0,3.0,0.0,4.0,1.0,0.0,5.0,6.0,2.0,2.0,18.0,4.0,5.0,1.0,0.0,11.0,0.0,10.0,5.0,25.0,1.0,0.0,5.0,15.0,12.0,0.0,6.0,18.0,24.0,0.0,3.0,48.0,0.0,6.0,1.0,0.0,8.0,1.0,1.0,0.0,4.0,4.0,8.0,0.0,1.0,1.0,2.0,0.0,14.0,0.0,76.0,26.0,4.0,0.0,0.0,22.0,18.0,0.0,6.0,31.0,3.0,29.0,15.0,0.0,19.0,2.0,14.0,12.0,0.0,2.0,9.0,9.0,19.0,43.0,11.0,34.0,78.0,0.0,13.0,0.0,32.0,32.0,41.0,0.0,138.0,18.0,0.0,10.0
+1.0,8.0,0.0,5.0,1.0,3.0,1.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,7.0,6.0,1.0,0.0,2.0,4.0,0.0,4.0,0.0,10.0,2.0,2.0,0.0,0.0,0.0,0.0,4.0,0.0,2.0,1.0,2.0,2.0,2.0,2.0,0.0,2.0,0.0,3.0,8.0,0.0,7.0,0.0,0.0,9.0,2.0,0.0,1.0,4.0,1.0,0.0,3.0,0.0,0.0,7.0,3.0,0.0,0.0,4.0,6.0,0.0,20.0,5.0,0.0,37.0,13.0,0.0,18.0,1.0,10.0,8.0,0.0,1.0,19.0,4.0,16.0,28.0,1.0,31.0,3.0,0.0,1.0,0.0,12.0,6.0,13.0,0.0,15.0,4.0,0.0,4.0
+1.0,0.0,0.0,4.0,1.0,2.0,3.0,0.0,1.0,1.0,1.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,2.0,2.0,0.0,0.0,3.0,0.0,1.0,0.0,10.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,4.0,6.0,1.0,0.0,2.0,2.0,0.0,3.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,12.0,3.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,3.0,1.0,1.0,0.0,1.0,1.0,5.0,0.0,11.0,2.0,2.0,26.0,14.0,0.0,21.0,0.0,18.0,9.0,0.0,0.0,7.0,0.0,8.0,40.0,11.0,33.0,0.0,0.0,18.0,0.0,6.0,3.0,9.0,0.0,2.0,1.0,0.0,7.0
+7.0,4.0,0.0,5.0,1.0,7.0,1.0,5.0,3.0,8.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0,0.0,1.0,3.0,1.0,0.0,6.0,2.0,3.0,2.0,0.0,8.0,0.0,14.0,5.0,5.0,0.0,0.0,1.0,5.0,10.0,7.0,8.0,6.0,6.0,2.0,2.0,16.0,0.0,9.0,0.0,10.0,6.0,3.0,3.0,1.0,5.0,4.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,34.0,22.0,2.0,0.0,4.0,10.0,10.0,0.0,14.0,15.0,4.0,49.0,19.0,0.0,6.0,0.0,20.0,6.0,0.0,0.0,16.0,7.0,15.0,34.0,3.0,44.0,29.0,0.0,8.0,0.0,13.0,17.0,17.0,0.0,85.0,24.0,0.0,19.0
+2.0,4.0,0.0,9.0,5.0,4.0,2.0,9.0,3.0,4.0,1.0,2.0,1.0,4.0,3.0,2.0,0.0,0.0,1.0,3.0,0.0,1.0,7.0,1.0,3.0,3.0,1.0,7.0,0.0,5.0,18.0,9.0,3.0,0.0,1.0,6.0,7.0,1.0,3.0,9.0,11.0,0.0,4.0,15.0,0.0,9.0,0.0,5.0,10.0,2.0,3.0,1.0,10.0,2.0,3.0,0.0,0.0,2.0,3.0,0.0,5.0,0.0,18.0,12.0,3.0,0.0,0.0,9.0,8.0,0.0,16.0,14.0,2.0,33.0,13.0,0.0,23.0,0.0,17.0,11.0,1.0,6.0,16.0,7.0,12.0,30.0,13.0,25.0,28.0,4.0,7.0,0.0,18.0,18.0,23.0,0.0,67.0,11.0,0.0,8.0
+1.0,2.0,0.0,1.0,0.0,1.0,0.0,1.0,4.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,23.0,0.0,0.0,0.0,1.0,0.0,3.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,1.0,2.0,0.0,3.0,0.0,0.0,15.0,0.0,3.0,6.0,2.0,4.0,0.0,3.0,0.0,0.0,0.0,5.0,5.0,1.0,0.0,3.0,1.0,0.0,3.0,1.0,7.0,4.0,0.0,1.0,0.0,4.0,2.0,0.0,16.0,10.0,0.0,1.0,1.0,6.0,2.0,0.0,3.0,12.0,4.0,4.0,15.0,0.0,21.0,9.0,5.0,1.0,0.0,5.0,6.0,12.0,6.0,22.0,8.0,0.0,4.0
+1.0,0.0,0.0,0.0,7.0,1.0,0.0,0.0,0.0,4.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,2.0,3.0,4.0,0.0,0.0,0.0,1.0,0.0,0.0,5.0,4.0,3.0,0.0,2.0,8.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,7.0,0.0,8.0,3.0,0.0,3.0,0.0,1.0,1.0,1.0,1.0,3.0,1.0,0.0,10.0,4.0,10.0,2.0,0.0,1.0,0.0,5.0,4.0,6.0,0.0,8.0,3.0,0.0,2.0
+1.0,4.0,0.0,4.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,1.0,0.0,1.0,0.0,0.0,1.0,8.0,0.0,6.0,0.0,0.0,3.0,0.0,5.0,4.0,6.0,2.0,0.0,1.0,5.0,4.0,0.0,0.0,9.0,11.0,0.0,4.0,9.0,0.0,0.0,0.0,0.0,9.0,7.0,1.0,0.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,0.0,2.0,1.0,30.0,7.0,2.0,0.0,0.0,9.0,4.0,0.0,10.0,5.0,1.0,15.0,15.0,0.0,11.0,0.0,10.0,4.0,0.0,1.0,16.0,2.0,6.0,30.0,6.0,14.0,29.0,0.0,10.0,0.0,27.0,8.0,13.0,0.0,46.0,8.0,0.0,4.0
+2.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,5.0,1.0,4.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,3.0,0.0,1.0,0.0,5.0,2.0,0.0,0.0,0.0,1.0,0.0,3.0,0.0,1.0,2.0,2.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,4.0,0.0,1.0,1.0,5.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,5.0,0.0,3.0,2.0,0.0,0.0,1.0,5.0,3.0,0.0,10.0,0.0,2.0,15.0,17.0,0.0,17.0,0.0,17.0,2.0,0.0,2.0,16.0,6.0,20.0,25.0,1.0,25.0,6.0,0.0,3.0,0.0,15.0,8.0,28.0,0.0,9.0,13.0,0.0,9.0
+6.0,2.0,0.0,11.0,0.0,2.0,2.0,0.0,8.0,4.0,0.0,2.0,0.0,2.0,1.0,4.0,1.0,0.0,1.0,1.0,0.0,0.0,10.0,0.0,6.0,1.0,0.0,3.0,0.0,5.0,3.0,4.0,12.0,0.0,2.0,6.0,5.0,0.0,5.0,13.0,8.0,1.0,5.0,14.0,0.0,2.0,0.0,0.0,4.0,6.0,1.0,1.0,6.0,1.0,3.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,22.0,8.0,5.0,0.0,1.0,15.0,2.0,0.0,10.0,10.0,2.0,16.0,19.0,0.0,16.0,0.0,11.0,0.0,2.0,3.0,5.0,2.0,2.0,28.0,15.0,43.0,22.0,0.0,8.0,0.0,18.0,21.0,13.0,0.0,43.0,5.0,0.0,6.0
+2.0,1.0,0.0,3.0,2.0,1.0,4.0,0.0,6.0,2.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,4.0,5.0,2.0,0.0,4.0,0.0,1.0,2.0,0.0,0.0,0.0,3.0,3.0,0.0,5.0,2.0,4.0,0.0,5.0,0.0,5.0,8.0,0.0,1.0,1.0,10.0,1.0,1.0,0.0,0.0,7.0,0.0,7.0,0.0,3.0,6.0,6.0,0.0,4.0,6.0,1.0,0.0,0.0,0.0,11.0,4.0,3.0,0.0,0.0,5.0,5.0,0.0,8.0,1.0,4.0,16.0,14.0,0.0,20.0,0.0,11.0,6.0,0.0,5.0,6.0,6.0,17.0,22.0,6.0,19.0,7.0,3.0,10.0,0.0,13.0,10.0,23.0,0.0,26.0,22.0,0.0,10.0
+3.0,4.0,0.0,2.0,5.0,6.0,2.0,4.0,12.0,0.0,4.0,2.0,0.0,0.0,1.0,4.0,0.0,1.0,0.0,4.0,1.0,1.0,10.0,4.0,1.0,2.0,1.0,7.0,0.0,6.0,3.0,12.0,0.0,0.0,3.0,8.0,5.0,0.0,3.0,10.0,22.0,0.0,2.0,27.0,0.0,2.0,0.0,0.0,2.0,0.0,3.0,0.0,0.0,5.0,3.0,0.0,0.0,6.0,0.0,0.0,5.0,0.0,19.0,12.0,3.0,0.0,1.0,10.0,0.0,0.0,24.0,35.0,3.0,31.0,9.0,2.0,13.0,0.0,11.0,4.0,2.0,0.0,3.0,5.0,7.0,27.0,3.0,24.0,38.0,0.0,10.0,0.0,1.0,18.0,16.0,0.0,64.0,9.0,0.0,8.0
+1.0,7.0,0.0,6.0,1.0,1.0,1.0,0.0,6.0,4.0,0.0,0.0,0.0,0.0,0.0,1.0,4.0,0.0,0.0,1.0,0.0,2.0,4.0,0.0,2.0,2.0,0.0,2.0,0.0,7.0,0.0,13.0,0.0,0.0,3.0,5.0,3.0,0.0,7.0,12.0,10.0,0.0,1.0,29.0,0.0,0.0,0.0,0.0,6.0,1.0,0.0,1.0,3.0,0.0,2.0,0.0,0.0,2.0,1.0,0.0,1.0,0.0,41.0,11.0,2.0,0.0,0.0,3.0,2.0,0.0,9.0,33.0,1.0,14.0,5.0,0.0,3.0,0.0,8.0,1.0,0.0,2.0,15.0,2.0,4.0,17.0,6.0,23.0,21.0,0.0,7.0,0.0,10.0,7.0,7.0,0.0,57.0,5.0,0.0,2.0
+2.0,1.0,0.0,1.0,0.0,3.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,4.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,6.0,0.0,0.0,0.0,0.0,1.0,0.0,4.0,4.0,0.0,6.0,3.0,0.0,1.0,0.0,4.0,2.0,0.0,1.0,2.0,0.0,1.0,7.0,0.0,12.0,3.0,0.0,1.0,0.0,3.0,1.0,2.0,0.0,15.0,0.0,0.0,1.0
+0.0,2.0,0.0,2.0,5.0,2.0,0.0,0.0,3.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,3.0,0.0,0.0,1.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,0.0,0.0,0.0,4.0,0.0,5.0,0.0,0.0,9.0,9.0,0.0,8.0,0.0,1.0,0.0,0.0,0.0,5.0,1.0,4.0,10.0,0.0,7.0,1.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,5.0,0.0,0.0,1.0
+0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,3.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,7.0,2.0,0.0,0.0
+0.0,11.0,0.0,2.0,1.0,5.0,2.0,0.0,4.0,5.0,1.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,1.0,0.0,0.0,8.0,0.0,0.0,4.0,0.0,8.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11.0,0.0,0.0,2.0,6.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,3.0,0.0,0.0,0.0,2.0,0.0,5.0,0.0,0.0,5.0,7.0,0.0,6.0,0.0,5.0,2.0,0.0,0.0,4.0,0.0,1.0,18.0,0.0,18.0,5.0,0.0,0.0,0.0,3.0,3.0,8.0,0.0,17.0,1.0,0.0,0.0
+0.0,3.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,1.0,1.0,4.0,0.0,5.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,4.0,2.0,0.0,0.0,0.0,3.0,0.0,5.0,0.0,0.0,17.0,9.0,0.0,9.0,0.0,4.0,2.0,0.0,0.0,2.0,0.0,1.0,10.0,0.0,12.0,1.0,0.0,0.0,0.0,0.0,1.0,7.0,0.0,4.0,2.0,0.0,0.0
+0.0,0.0,0.0,0.0,1.0,5.0,0.0,0.0,5.0,0.0,0.0,2.0,0.0,0.0,1.0,1.0,3.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,0.0,6.0,1.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,3.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,1.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,5.0,2.0,1.0,0.0,0.0,2.0,4.0,0.0,5.0,1.0,0.0,17.0,5.0,0.0,8.0,0.0,12.0,0.0,0.0,0.0,4.0,1.0,5.0,15.0,0.0,20.0,3.0,0.0,0.0,0.0,2.0,1.0,6.0,0.0,22.0,0.0,0.0,0.0
+3.0,8.0,0.0,1.0,3.0,3.0,0.0,0.0,12.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,3.0,0.0,0.0,4.0,1.0,1.0,6.0,0.0,5.0,0.0,9.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,4.0,3.0,0.0,0.0,0.0,2.0,0.0,7.0,1.0,0.0,21.0,11.0,0.0,8.0,0.0,8.0,6.0,0.0,0.0,11.0,0.0,0.0,12.0,0.0,14.0,9.0,0.0,1.0,0.0,9.0,4.0,12.0,0.0,8.0,2.0,0.0,1.0
+0.0,6.0,0.0,2.0,4.0,0.0,0.0,0.0,4.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,1.0,0.0,5.0,0.0,2.0,7.0,1.0,3.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,9.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,2.0,1.0,0.0,0.0,1.0,0.0,0.0,4.0,0.0,0.0,3.0,3.0,0.0,3.0,0.0,2.0,0.0,0.0,0.0,3.0,0.0,2.0,5.0,0.0,13.0,4.0,0.0,1.0,0.0,1.0,2.0,1.0,0.0,10.0,0.0,0.0,0.0
+0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,3.0,0.0,0.0,0.0,1.0,0.0,4.0,0.0,0.0,1.0,0.0,3.0,0.0,2.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,4.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,7.0,0.0,3.0,0.0,3.0,2.0,0.0,0.0,1.0,1.0,1.0,9.0,0.0,8.0,4.0,0.0,0.0,0.0,3.0,0.0,4.0,0.0,11.0,0.0,0.0,0.0
+0.0,1.0,0.0,5.0,2.0,3.0,1.0,0.0,4.0,5.0,4.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,7.0,1.0,0.0,7.0,0.0,3.0,0.0,7.0,3.0,1.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,5.0,1.0,0.0,0.0,3.0,0.0,3.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5.0,3.0,0.0,0.0,0.0,5.0,1.0,0.0,3.0,0.0,0.0,11.0,17.0,0.0,4.0,0.0,9.0,0.0,0.0,0.0,2.0,0.0,2.0,10.0,1.0,23.0,2.0,0.0,0.0,0.0,2.0,0.0,4.0,0.0,22.0,0.0,0.0,1.0
+1.0,1.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,4.0,3.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,8.0,1.0,1.0,0.0,0.0,1.0,6.0,0.0,1.0,4.0,0.0,2.0,0.0,3.0,0.0,1.0,0.0,1.0,1.0,2.0,0.0,0.0,2.0,3.0,1.0,0.0,0.0,4.0,4.0,0.0,0.0,6.0,1.0,4.0,0.0,0.0,3.0,0.0,0.0,1.0,2.0,1.0,1.0,4.0,0.0,1.0,3.0,1.0,0.0,2.0,1.0,1.0,4.0,3.0,0.0,6.0,0.0,14.0,1.0,0.0,3.0,9.0,0.0,7.0,9.0,5.0,7.0,6.0,6.0,2.0,3.0,14.0,9.0,14.0,0.0,9.0,9.0,14.0,8.0
+0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,4.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,3.0,1.0,0.0,5.0,0.0,7.0,3.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,4.0,1.0,1.0,0.0,0.0,3.0,6.0,3.0,0.0,8.0,1.0,0.0,2.0
+0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,1.0,2.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,5.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,0.0,3.0,5.0,0.0,6.0,0.0,4.0,5.0,0.0,0.0,0.0,0.0,3.0,4.0,1.0,2.0,1.0,1.0,0.0,0.0,2.0,5.0,13.0,0.0,3.0,1.0,0.0,2.0
+1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,2.0,4.0,7.0,2.0,3.0,0.0,3.0,0.0,0.0,2.0,5.0,0.0,0.0,4.0,0.0,1.0,4.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,1.0,1.0,1.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,4.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,2.0,1.0,11.0,0.0,2.0,0.0,4.0,1.0,0.0,0.0,8.0,0.0,6.0,12.0,2.0,10.0,5.0,1.0,3.0,0.0,3.0,6.0,9.0,0.0,4.0,1.0,0.0,2.0
+0.0,1.0,0.0,2.0,0.0,1.0,0.0,1.0,4.0,4.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,3.0,2.0,1.0,1.0,1.0,1.0,2.0,3.0,2.0,2.0,1.0,2.0,0.0,0.0,1.0,5.0,0.0,1.0,0.0,0.0,2.0,3.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,0.0,2.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,5.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,4.0,0.0,3.0,0.0,2.0,2.0,0.0,0.0,1.0,0.0,2.0,5.0,3.0,11.0,4.0,0.0,0.0,0.0,3.0,3.0,14.0,0.0,2.0,4.0,1.0,2.0
+0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,2.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,2.0,3.0,0.0,1.0,0.0,3.0,0.0,2.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,5.0,0.0,4.0,1.0,0.0,0.0,2.0,2.0,2.0,6.0,2.0,6.0,1.0,0.0,1.0,0.0,3.0,2.0,5.0,0.0,9.0,2.0,0.0,2.0
+1.0,1.0,0.0,0.0,2.0,2.0,0.0,0.0,2.0,3.0,0.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,5.0,0.0,0.0,0.0,3.0,0.0,1.0,0.0,1.0,0.0,7.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,1.0,1.0,0.0,1.0,4.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,5.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,3.0,0.0,5.0,5.0,0.0,2.0,1.0,0.0,1.0,0.0,2.0,4.0,6.0,0.0,6.0,2.0,0.0,5.0
+0.0,8.0,0.0,9.0,0.0,10.0,1.0,0.0,24.0,2.0,3.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,14.0,0.0,0.0,10.0,12.0,6.0,0.0,3.0,1.0,0.0,0.0,19.0,1.0,0.0,0.0,0.0,1.0,9.0,4.0,0.0,2.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,9.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,7.0,0.0,0.0,0.0,2.0,3.0,0.0,13.0,0.0,0.0,11.0,6.0,0.0,5.0,0.0,10.0,5.0,0.0,0.0,12.0,2.0,6.0,30.0,0.0,8.0,9.0,0.0,0.0,0.0,1.0,5.0,6.0,0.0,13.0,2.0,0.0,4.0
+8.0,6.0,0.0,6.0,0.0,15.0,12.0,1.0,11.0,8.0,6.0,2.0,0.0,3.0,1.0,13.0,0.0,0.0,15.0,1.0,3.0,30.0,5.0,12.0,11.0,4.0,7.0,10.0,0.0,8.0,10.0,1.0,17.0,20.0,7.0,7.0,9.0,0.0,5.0,5.0,13.0,0.0,14.0,5.0,1.0,4.0,2.0,0.0,15.0,5.0,21.0,6.0,16.0,2.0,20.0,0.0,4.0,2.0,9.0,0.0,6.0,13.0,25.0,10.0,10.0,0.0,9.0,18.0,4.0,0.0,4.0,6.0,3.0,19.0,32.0,4.0,25.0,0.0,34.0,17.0,17.0,16.0,40.0,15.0,41.0,28.0,11.0,74.0,80.0,9.0,20.0,0.0,47.0,14.0,95.0,1.0,78.0,38.0,9.0,33.0
+19.0,5.0,0.0,10.0,1.0,14.0,7.0,5.0,11.0,10.0,4.0,3.0,0.0,3.0,2.0,4.0,3.0,0.0,9.0,4.0,5.0,42.0,12.0,17.0,7.0,9.0,4.0,4.0,0.0,5.0,14.0,2.0,21.0,5.0,18.0,3.0,33.0,0.0,14.0,12.0,9.0,0.0,17.0,8.0,41.0,15.0,3.0,0.0,59.0,6.0,9.0,8.0,16.0,9.0,23.0,0.0,11.0,8.0,5.0,0.0,20.0,17.0,16.0,8.0,21.0,1.0,11.0,19.0,5.0,1.0,16.0,12.0,1.0,8.0,26.0,1.0,35.0,0.0,28.0,33.0,5.0,17.0,69.0,15.0,66.0,16.0,25.0,40.0,43.0,13.0,23.0,2.0,59.0,32.0,138.0,0.0,59.0,84.0,1.0,26.0
+7.0,5.0,0.0,3.0,10.0,9.0,4.0,1.0,14.0,23.0,5.0,0.0,0.0,0.0,1.0,5.0,0.0,0.0,6.0,4.0,10.0,41.0,12.0,12.0,6.0,15.0,9.0,8.0,0.0,24.0,19.0,9.0,8.0,5.0,8.0,6.0,7.0,0.0,10.0,16.0,11.0,0.0,7.0,5.0,0.0,5.0,0.0,0.0,37.0,2.0,12.0,1.0,21.0,18.0,5.0,0.0,3.0,4.0,2.0,0.0,22.0,12.0,16.0,14.0,9.0,0.0,6.0,16.0,11.0,0.0,20.0,11.0,5.0,19.0,39.0,2.0,39.0,4.0,52.0,31.0,3.0,10.0,47.0,13.0,89.0,67.0,10.0,64.0,58.0,5.0,42.0,0.0,61.0,39.0,140.0,0.0,96.0,55.0,4.0,10.0
+8.0,12.0,0.0,11.0,5.0,12.0,4.0,1.0,9.0,31.0,5.0,2.0,0.0,0.0,0.0,5.0,2.0,0.0,5.0,2.0,14.0,31.0,10.0,1.0,4.0,26.0,22.0,15.0,0.0,11.0,6.0,2.0,9.0,20.0,3.0,2.0,21.0,0.0,16.0,14.0,6.0,1.0,2.0,7.0,0.0,14.0,0.0,0.0,11.0,2.0,18.0,0.0,10.0,16.0,8.0,1.0,1.0,9.0,6.0,0.0,7.0,34.0,20.0,32.0,5.0,0.0,6.0,13.0,9.0,0.0,11.0,7.0,10.0,6.0,18.0,0.0,17.0,3.0,37.0,11.0,1.0,13.0,19.0,23.0,58.0,38.0,12.0,47.0,24.0,5.0,99.0,0.0,49.0,30.0,78.0,0.0,48.0,34.0,0.0,26.0
+15.0,13.0,0.0,4.0,5.0,7.0,6.0,6.0,7.0,3.0,3.0,0.0,0.0,0.0,2.0,13.0,0.0,0.0,6.0,2.0,10.0,0.0,12.0,25.0,11.0,3.0,14.0,15.0,0.0,19.0,20.0,4.0,7.0,2.0,4.0,10.0,23.0,0.0,14.0,12.0,11.0,0.0,7.0,14.0,2.0,25.0,1.0,0.0,28.0,8.0,32.0,1.0,14.0,11.0,7.0,0.0,5.0,11.0,6.0,0.0,15.0,14.0,37.0,50.0,21.0,0.0,12.0,14.0,8.0,0.0,26.0,6.0,15.0,29.0,38.0,2.0,41.0,3.0,36.0,27.0,0.0,13.0,65.0,39.0,61.0,44.0,8.0,57.0,42.0,5.0,84.0,0.0,78.0,12.0,80.0,0.0,101.0,90.0,1.0,37.0
+1.0,5.0,0.0,7.0,0.0,6.0,0.0,1.0,14.0,0.0,8.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,10.0,1.0,0.0,2.0,4.0,4.0,0.0,2.0,2.0,3.0,0.0,0.0,0.0,1.0,2.0,1.0,0.0,15.0,1.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,3.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,6.0,4.0,3.0,4.0,0.0,2.0,4.0,4.0,2.0,4.0,0.0,0.0,3.0,6.0,1.0,6.0,1.0,7.0,2.0,0.0,0.0,4.0,1.0,5.0,13.0,1.0,10.0,4.0,1.0,0.0,0.0,7.0,7.0,18.0,4.0,16.0,6.0,0.0,6.0
+0.0,2.0,0.0,6.0,3.0,5.0,1.0,0.0,4.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,2.0,11.0,5.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,4.0,0.0,1.0,3.0,1.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,3.0,0.0,2.0,0.0,0.0,4.0,2.0,0.0,2.0,0.0,4.0,2.0,0.0,0.0,5.0,1.0,6.0,5.0,0.0,8.0,3.0,0.0,0.0,1.0,0.0,2.0,6.0,0.0,9.0,2.0,0.0,4.0
+0.0,3.0,0.0,8.0,6.0,3.0,2.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,6.0,0.0,1.0,3.0,0.0,0.0,6.0,1.0,0.0,3.0,7.0,5.0,0.0,1.0,4.0,1.0,0.0,0.0,1.0,3.0,0.0,1.0,0.0,8.0,1.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,7.0,0.0,0.0,0.0,1.0,1.0,0.0,3.0,0.0,0.0,5.0,3.0,0.0,4.0,0.0,5.0,3.0,1.0,0.0,6.0,0.0,5.0,5.0,0.0,10.0,16.0,0.0,0.0,0.0,2.0,1.0,4.0,0.0,22.0,4.0,1.0,0.0
+1.0,13.0,2.0,12.0,3.0,12.0,4.0,0.0,19.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,3.0,1.0,0.0,10.0,0.0,0.0,9.0,15.0,10.0,0.0,0.0,2.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,30.0,0.0,2.0,2.0,1.0,0.0,2.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,3.0,0.0,8.0,1.0,0.0,0.0,1.0,2.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,2.0,0.0,2.0,1.0,0.0,0.0,8.0,1.0,2.0,8.0,0.0,12.0,9.0,0.0,0.0,0.0,2.0,11.0,8.0,0.0,15.0,2.0,1.0,3.0
+1.0,8.0,0.0,3.0,6.0,2.0,3.0,0.0,4.0,3.0,10.0,0.0,0.0,0.0,1.0,2.0,1.0,0.0,0.0,2.0,4.0,5.0,4.0,3.0,2.0,6.0,3.0,15.0,1.0,2.0,4.0,5.0,1.0,1.0,3.0,10.0,6.0,1.0,1.0,7.0,11.0,0.0,1.0,11.0,0.0,11.0,0.0,0.0,4.0,2.0,4.0,0.0,2.0,4.0,2.0,1.0,1.0,6.0,1.0,1.0,2.0,4.0,8.0,10.0,9.0,0.0,2.0,14.0,4.0,0.0,3.0,75.0,0.0,3.0,7.0,1.0,6.0,0.0,11.0,12.0,1.0,0.0,1.0,2.0,10.0,8.0,3.0,11.0,45.0,1.0,9.0,0.0,21.0,12.0,17.0,1.0,97.0,6.0,1.0,8.0
+7.0,9.0,0.0,17.0,18.0,14.0,7.0,0.0,17.0,8.0,9.0,5.0,0.0,1.0,0.0,3.0,2.0,0.0,0.0,8.0,1.0,9.0,21.0,8.0,15.0,4.0,16.0,19.0,0.0,6.0,4.0,7.0,19.0,0.0,10.0,5.0,6.0,1.0,12.0,17.0,6.0,0.0,13.0,6.0,0.0,24.0,0.0,0.0,26.0,0.0,14.0,0.0,12.0,2.0,21.0,0.0,1.0,14.0,7.0,0.0,6.0,11.0,16.0,30.0,24.0,1.0,1.0,24.0,17.0,0.0,14.0,38.0,3.0,20.0,15.0,0.0,28.0,1.0,19.0,29.0,1.0,1.0,21.0,13.0,24.0,28.0,30.0,40.0,34.0,0.0,37.0,0.0,76.0,30.0,51.0,3.0,69.0,27.0,3.0,27.0
+10.0,24.0,0.0,20.0,16.0,33.0,9.0,4.0,27.0,16.0,35.0,6.0,0.0,3.0,2.0,4.0,7.0,0.0,1.0,1.0,7.0,1.0,19.0,8.0,12.0,1.0,31.0,25.0,0.0,9.0,6.0,44.0,15.0,0.0,7.0,35.0,8.0,0.0,11.0,28.0,47.0,0.0,11.0,50.0,0.0,10.0,0.0,0.0,25.0,2.0,23.0,1.0,5.0,7.0,13.0,2.0,5.0,8.0,4.0,0.0,21.0,20.0,32.0,57.0,16.0,0.0,4.0,43.0,18.0,0.0,26.0,120.0,11.0,31.0,38.0,0.0,32.0,0.0,46.0,27.0,6.0,1.0,22.0,14.0,32.0,34.0,45.0,31.0,107.0,2.0,17.0,1.0,69.0,46.0,46.0,4.0,222.0,43.0,2.0,17.0
+4.0,8.0,0.0,8.0,19.0,11.0,7.0,0.0,21.0,13.0,20.0,2.0,0.0,5.0,0.0,9.0,13.0,1.0,3.0,5.0,4.0,0.0,23.0,4.0,12.0,0.0,20.0,18.0,0.0,3.0,3.0,31.0,4.0,0.0,4.0,22.0,6.0,0.0,1.0,16.0,25.0,0.0,6.0,53.0,1.0,15.0,0.0,1.0,6.0,6.0,17.0,1.0,4.0,1.0,7.0,2.0,2.0,9.0,2.0,0.0,7.0,14.0,21.0,26.0,13.0,0.0,1.0,38.0,8.0,2.0,7.0,20.0,6.0,10.0,12.0,1.0,19.0,0.0,36.0,14.0,0.0,1.0,10.0,6.0,27.0,24.0,17.0,27.0,98.0,2.0,51.0,1.0,68.0,72.0,47.0,7.0,142.0,24.0,3.0,16.0
+5.0,3.0,0.0,3.0,3.0,2.0,3.0,2.0,4.0,13.0,7.0,0.0,0.0,1.0,0.0,2.0,8.0,1.0,0.0,4.0,0.0,9.0,10.0,3.0,1.0,2.0,5.0,12.0,0.0,4.0,1.0,26.0,2.0,0.0,4.0,16.0,31.0,0.0,1.0,12.0,32.0,0.0,2.0,39.0,0.0,6.0,0.0,1.0,13.0,1.0,3.0,0.0,2.0,4.0,2.0,0.0,2.0,3.0,2.0,0.0,3.0,6.0,31.0,11.0,5.0,0.0,1.0,10.0,8.0,0.0,8.0,57.0,6.0,3.0,7.0,0.0,11.0,0.0,14.0,10.0,0.0,1.0,9.0,12.0,19.0,13.0,7.0,16.0,92.0,0.0,6.0,0.0,17.0,30.0,37.0,2.0,118.0,14.0,2.0,24.0
+6.0,9.0,0.0,6.0,8.0,9.0,3.0,1.0,6.0,8.0,2.0,1.0,0.0,2.0,1.0,9.0,1.0,0.0,1.0,7.0,5.0,0.0,4.0,5.0,5.0,3.0,5.0,7.0,1.0,9.0,2.0,14.0,2.0,0.0,1.0,18.0,18.0,4.0,8.0,9.0,25.0,0.0,5.0,11.0,5.0,13.0,1.0,5.0,8.0,1.0,13.0,0.0,3.0,1.0,4.0,1.0,0.0,12.0,2.0,0.0,9.0,1.0,16.0,19.0,4.0,0.0,5.0,26.0,6.0,0.0,14.0,18.0,7.0,29.0,37.0,2.0,37.0,1.0,34.0,14.0,4.0,5.0,10.0,9.0,24.0,46.0,12.0,38.0,67.0,2.0,8.0,1.0,22.0,40.0,40.0,7.0,115.0,8.0,0.0,43.0
+3.0,5.0,0.0,1.0,0.0,0.0,0.0,4.0,1.0,0.0,3.0,0.0,0.0,1.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,2.0,1.0,0.0,2.0,0.0,1.0,1.0,4.0,1.0,0.0,0.0,3.0,1.0,0.0,2.0,0.0,1.0,1.0,0.0,2.0,2.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,3.0,0.0,2.0,1.0,0.0,0.0,5.0,2.0,0.0,0.0,1.0,0.0,2.0,3.0,0.0,0.0,0.0,4.0,1.0,0.0,1.0,1.0,5.0,0.0,5.0,0.0,5.0,3.0,0.0,0.0,7.0,14.0,10.0,8.0,0.0,16.0,2.0,1.0,23.0
+26.0,5.0,4.0,10.0,16.0,8.0,6.0,21.0,10.0,7.0,19.0,4.0,0.0,2.0,6.0,15.0,0.0,4.0,38.0,18.0,13.0,6.0,16.0,14.0,16.0,28.0,6.0,13.0,0.0,20.0,18.0,21.0,9.0,2.0,8.0,16.0,11.0,30.0,8.0,16.0,23.0,15.0,12.0,13.0,0.0,28.0,7.0,16.0,29.0,16.0,22.0,11.0,15.0,1.0,17.0,23.0,27.0,31.0,13.0,1.0,68.0,9.0,30.0,32.0,48.0,41.0,70.0,44.0,40.0,40.0,35.0,24.0,36.0,42.0,40.0,21.0,49.0,19.0,45.0,63.0,17.0,76.0,7.0,124.0,69.0,61.0,80.0,52.0,84.0,189.0,54.0,61.0,113.0,112.0,156.0,188.0,191.0,226.0,9.0,933.0
+8.0,4.0,0.0,9.0,11.0,13.0,9.0,15.0,14.0,11.0,21.0,5.0,1.0,6.0,7.0,8.0,0.0,0.0,16.0,6.0,23.0,2.0,21.0,12.0,11.0,8.0,8.0,9.0,1.0,14.0,16.0,23.0,14.0,0.0,6.0,33.0,23.0,2.0,4.0,15.0,27.0,2.0,7.0,22.0,5.0,36.0,1.0,48.0,23.0,15.0,27.0,0.0,11.0,6.0,12.0,3.0,15.0,23.0,7.0,0.0,30.0,16.0,30.0,22.0,11.0,6.0,19.0,42.0,44.0,1.0,14.0,69.0,25.0,28.0,31.0,2.0,25.0,51.0,30.0,71.0,3.0,23.0,34.0,76.0,89.0,30.0,19.0,41.0,141.0,24.0,60.0,13.0,74.0,92.0,158.0,5.0,267.0,74.0,4.0,227.0
+8.0,8.0,0.0,15.0,1.0,6.0,4.0,9.0,10.0,14.0,10.0,1.0,16.0,1.0,0.0,4.0,0.0,0.0,2.0,7.0,11.0,10.0,17.0,21.0,7.0,5.0,11.0,11.0,2.0,8.0,10.0,6.0,4.0,0.0,3.0,20.0,19.0,3.0,2.0,12.0,15.0,1.0,9.0,5.0,4.0,40.0,6.0,4.0,19.0,1.0,31.0,7.0,13.0,9.0,5.0,4.0,9.0,2.0,4.0,1.0,10.0,5.0,21.0,32.0,5.0,24.0,16.0,26.0,23.0,11.0,20.0,25.0,16.0,23.0,17.0,10.0,32.0,51.0,25.0,33.0,9.0,16.0,2.0,50.0,38.0,36.0,21.0,30.0,56.0,22.0,44.0,9.0,28.0,34.0,79.0,92.0,129.0,128.0,4.0,212.0
+8.0,3.0,0.0,3.0,2.0,8.0,4.0,11.0,3.0,3.0,10.0,6.0,5.0,3.0,4.0,7.0,2.0,0.0,17.0,7.0,11.0,0.0,14.0,9.0,5.0,4.0,10.0,5.0,0.0,4.0,25.0,11.0,3.0,6.0,2.0,20.0,14.0,10.0,2.0,22.0,19.0,3.0,8.0,25.0,2.0,33.0,1.0,22.0,18.0,5.0,27.0,3.0,9.0,8.0,9.0,4.0,13.0,7.0,7.0,2.0,12.0,14.0,15.0,27.0,15.0,2.0,8.0,10.0,35.0,8.0,20.0,23.0,19.0,11.0,34.0,3.0,25.0,185.0,31.0,54.0,5.0,45.0,12.0,127.0,65.0,43.0,17.0,32.0,65.0,52.0,7.0,1.0,48.0,40.0,116.0,65.0,151.0,105.0,4.0,343.0
+10.0,17.0,0.0,7.0,15.0,7.0,1.0,7.0,16.0,8.0,18.0,0.0,0.0,2.0,1.0,9.0,1.0,0.0,2.0,5.0,10.0,2.0,24.0,10.0,3.0,6.0,13.0,10.0,0.0,16.0,17.0,9.0,5.0,2.0,3.0,10.0,22.0,23.0,2.0,23.0,5.0,3.0,8.0,17.0,0.0,30.0,9.0,32.0,25.0,0.0,19.0,2.0,7.0,11.0,5.0,17.0,13.0,7.0,6.0,0.0,14.0,10.0,21.0,25.0,7.0,11.0,15.0,22.0,26.0,11.0,23.0,18.0,15.0,34.0,28.0,4.0,40.0,133.0,32.0,60.0,2.0,18.0,19.0,64.0,66.0,54.0,35.0,41.0,66.0,19.0,19.0,25.0,32.0,44.0,82.0,125.0,150.0,138.0,2.0,295.0
+4.0,12.0,0.0,15.0,12.0,12.0,5.0,3.0,21.0,2.0,14.0,3.0,1.0,7.0,1.0,5.0,7.0,1.0,1.0,7.0,6.0,10.0,18.0,7.0,9.0,7.0,15.0,7.0,1.0,13.0,13.0,16.0,11.0,0.0,9.0,25.0,32.0,5.0,5.0,13.0,22.0,3.0,6.0,42.0,0.0,25.0,4.0,4.0,28.0,0.0,15.0,0.0,6.0,7.0,10.0,0.0,6.0,13.0,3.0,0.0,14.0,18.0,38.0,26.0,14.0,1.0,8.0,31.0,25.0,1.0,26.0,72.0,8.0,25.0,34.0,1.0,30.0,4.0,22.0,30.0,6.0,2.0,18.0,16.0,23.0,30.0,29.0,34.0,139.0,2.0,38.0,2.0,41.0,39.0,53.0,0.0,225.0,59.0,1.0,73.0
+0.0,0.0,0.0,0.0,2.0,2.0,1.0,0.0,1.0,6.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,0.0,0.0,2.0,7.0,2.0,0.0,1.0,1.0,0.0,2.0,4.0,1.0,6.0,1.0,2.0,4.0,0.0,1.0,0.0,2.0,0.0,0.0,4.0,1.0,0.0,0.0,1.0,1.0,1.0,4.0,1.0,1.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,4.0,1.0,0.0,0.0,4.0,0.0,1.0,7.0,0.0,3.0,0.0,5.0,14.0,4.0,2.0,6.0,14.0,1.0,3.0,1.0,3.0,4.0,4.0,6.0,35.0,5.0,0.0,36.0
+10.0,19.0,0.0,22.0,16.0,15.0,15.0,11.0,18.0,5.0,33.0,28.0,66.0,6.0,6.0,22.0,1.0,13.0,18.0,20.0,12.0,54.0,12.0,5.0,32.0,10.0,18.0,24.0,10.0,19.0,28.0,13.0,25.0,4.0,19.0,15.0,24.0,12.0,54.0,24.0,25.0,8.0,25.0,19.0,21.0,9.0,6.0,14.0,12.0,25.0,25.0,7.0,23.0,23.0,39.0,23.0,20.0,22.0,39.0,1.0,30.0,23.0,29.0,46.0,33.0,8.0,19.0,57.0,39.0,15.0,23.0,16.0,25.0,34.0,31.0,15.0,31.0,6.0,32.0,34.0,14.0,60.0,31.0,83.0,48.0,43.0,172.0,48.0,79.0,74.0,59.0,181.0,124.0,122.0,89.0,101.0,161.0,274.0,12.0,344.0
+5.0,2.0,0.0,2.0,0.0,3.0,2.0,1.0,3.0,0.0,2.0,1.0,0.0,0.0,3.0,4.0,2.0,0.0,10.0,1.0,5.0,0.0,4.0,3.0,6.0,0.0,4.0,0.0,0.0,1.0,4.0,0.0,3.0,0.0,4.0,1.0,1.0,0.0,4.0,3.0,1.0,0.0,2.0,0.0,0.0,2.0,1.0,0.0,2.0,0.0,1.0,4.0,8.0,4.0,5.0,0.0,4.0,6.0,3.0,0.0,1.0,2.0,2.0,2.0,3.0,1.0,0.0,4.0,4.0,6.0,5.0,1.0,2.0,0.0,2.0,0.0,5.0,2.0,4.0,7.0,2.0,3.0,6.0,8.0,13.0,3.0,8.0,3.0,4.0,4.0,6.0,5.0,7.0,22.0,10.0,4.0,4.0,16.0,2.0,18.0
+6.0,6.0,1.0,2.0,2.0,1.0,1.0,3.0,7.0,0.0,10.0,5.0,0.0,0.0,3.0,10.0,0.0,0.0,1.0,3.0,5.0,0.0,4.0,1.0,12.0,8.0,6.0,6.0,0.0,13.0,10.0,5.0,9.0,9.0,2.0,3.0,4.0,39.0,6.0,4.0,5.0,6.0,14.0,8.0,31.0,23.0,1.0,29.0,9.0,6.0,24.0,4.0,9.0,12.0,7.0,7.0,8.0,4.0,4.0,0.0,11.0,6.0,3.0,13.0,11.0,0.0,15.0,7.0,17.0,7.0,14.0,33.0,8.0,20.0,25.0,5.0,21.0,4.0,23.0,16.0,3.0,29.0,8.0,19.0,24.0,24.0,23.0,15.0,34.0,16.0,10.0,2.0,40.0,27.0,38.0,27.0,70.0,43.0,0.0,108.0
+10.0,13.0,0.0,13.0,13.0,10.0,3.0,9.0,11.0,14.0,23.0,3.0,9.0,5.0,1.0,6.0,2.0,1.0,2.0,11.0,17.0,30.0,4.0,19.0,8.0,16.0,11.0,17.0,0.0,21.0,20.0,29.0,8.0,3.0,6.0,10.0,26.0,32.0,3.0,10.0,15.0,0.0,3.0,20.0,3.0,30.0,4.0,30.0,11.0,8.0,64.0,6.0,7.0,8.0,12.0,15.0,8.0,9.0,5.0,2.0,9.0,16.0,21.0,56.0,16.0,24.0,19.0,31.0,29.0,28.0,42.0,24.0,31.0,24.0,32.0,8.0,36.0,3.0,34.0,42.0,11.0,23.0,18.0,105.0,67.0,37.0,68.0,41.0,91.0,35.0,40.0,44.0,39.0,76.0,77.0,138.0,194.0,159.0,5.0,231.0
+8.0,6.0,0.0,9.0,2.0,18.0,5.0,9.0,11.0,11.0,7.0,0.0,8.0,0.0,2.0,4.0,0.0,1.0,3.0,3.0,7.0,2.0,10.0,3.0,8.0,10.0,8.0,8.0,0.0,21.0,8.0,2.0,3.0,1.0,0.0,2.0,12.0,29.0,4.0,22.0,3.0,3.0,3.0,4.0,13.0,16.0,0.0,25.0,16.0,9.0,4.0,2.0,5.0,3.0,2.0,21.0,11.0,8.0,1.0,0.0,10.0,7.0,7.0,39.0,9.0,0.0,13.0,16.0,24.0,6.0,58.0,23.0,13.0,59.0,68.0,3.0,103.0,81.0,84.0,28.0,1.0,24.0,31.0,50.0,41.0,115.0,15.0,123.0,29.0,16.0,3.0,3.0,32.0,35.0,65.0,63.0,63.0,95.0,4.0,280.0
+12.0,14.0,4.0,17.0,16.0,9.0,4.0,7.0,8.0,12.0,20.0,0.0,0.0,2.0,13.0,14.0,2.0,23.0,6.0,9.0,15.0,14.0,12.0,11.0,12.0,14.0,17.0,15.0,0.0,20.0,12.0,16.0,6.0,2.0,4.0,13.0,25.0,35.0,2.0,22.0,11.0,14.0,7.0,7.0,1.0,42.0,1.0,49.0,20.0,18.0,23.0,8.0,8.0,6.0,8.0,35.0,19.0,25.0,4.0,4.0,40.0,14.0,16.0,49.0,34.0,14.0,22.0,17.0,31.0,14.0,34.0,26.0,44.0,45.0,30.0,25.0,28.0,9.0,22.0,65.0,9.0,44.0,4.0,94.0,75.0,58.0,45.0,47.0,90.0,110.0,16.0,31.0,72.0,95.0,111.0,137.0,137.0,96.0,6.0,476.0
+1.0,0.0,0.0,1.0,0.0,1.0,2.0,1.0,0.0,1.0,0.0,5.0,1.0,0.0,0.0,1.0,2.0,3.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,14.0,3.0,0.0,4.0,0.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,3.0,0.0,0.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,3.0,4.0,1.0,3.0,0.0,2.0,2.0,0.0,7.0,0.0,0.0,1.0,0.0,1.0,2.0,4.0,0.0,2.0,12.0,0.0,7.0,22.0,35.0,11.0,22.0,0.0,13.0,9.0,0.0,3.0,182.0,4.0,2.0,36.0,7.0,26.0,2.0,8.0,1.0,10.0,5.0,5.0,2.0,37.0,16.0,15.0,1.0,25.0
+19.0,9.0,7.0,18.0,5.0,18.0,6.0,11.0,21.0,6.0,14.0,2.0,0.0,3.0,2.0,17.0,4.0,0.0,10.0,8.0,19.0,5.0,13.0,5.0,19.0,35.0,10.0,12.0,0.0,41.0,36.0,18.0,13.0,2.0,9.0,21.0,4.0,21.0,13.0,24.0,21.0,4.0,11.0,14.0,0.0,4.0,7.0,45.0,4.0,4.0,9.0,7.0,14.0,152.0,8.0,32.0,24.0,9.0,7.0,1.0,25.0,11.0,14.0,20.0,13.0,17.0,23.0,32.0,26.0,9.0,61.0,15.0,37.0,67.0,88.0,8.0,72.0,3.0,91.0,32.0,11.0,63.0,145.0,86.0,53.0,104.0,42.0,78.0,60.0,94.0,35.0,31.0,56.0,103.0,59.0,121.0,216.0,118.0,3.0,383.0
+0.0,1.0,0.0,1.0,9.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,2.0,3.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,1.0,4.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,2.0,5.0,5.0,1.0,0.0,2.0,1.0,1.0,1.0,0.0,5.0,0.0,1.0,0.0,3.0,3.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,4.0,0.0,3.0,2.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,0.0,1.0,0.0,1.0,2.0,0.0,2.0,3.0,5.0,3.0,11.0,0.0,2.0,11.0,0.0,1.0,1.0,2.0,1.0,8.0,5.0,36.0,0.0,0.0,2.0
+7.0,9.0,0.0,10.0,17.0,18.0,7.0,20.0,18.0,15.0,15.0,6.0,9.0,8.0,1.0,10.0,2.0,56.0,8.0,6.0,28.0,6.0,10.0,2.0,6.0,23.0,8.0,6.0,5.0,8.0,21.0,3.0,13.0,0.0,0.0,1.0,14.0,51.0,1.0,19.0,5.0,9.0,12.0,7.0,4.0,7.0,3.0,56.0,37.0,2.0,16.0,0.0,9.0,3.0,8.0,38.0,19.0,35.0,2.0,0.0,47.0,12.0,20.0,43.0,8.0,13.0,27.0,21.0,50.0,14.0,31.0,36.0,40.0,32.0,40.0,2.0,35.0,20.0,38.0,84.0,3.0,43.0,12.0,71.0,74.0,26.0,33.0,41.0,52.0,70.0,53.0,1.0,27.0,90.0,165.0,69.0,102.0,160.0,3.0,525.0
+25.0,35.0,3.0,32.0,31.0,18.0,14.0,40.0,40.0,10.0,16.0,18.0,2.0,20.0,4.0,23.0,15.0,1.0,12.0,19.0,24.0,5.0,46.0,31.0,55.0,25.0,44.0,54.0,7.0,18.0,43.0,55.0,19.0,4.0,6.0,25.0,30.0,19.0,31.0,67.0,50.0,19.0,28.0,44.0,25.0,34.0,10.0,40.0,15.0,16.0,51.0,8.0,30.0,15.0,35.0,9.0,20.0,42.0,25.0,1.0,47.0,19.0,40.0,52.0,29.0,115.0,20.0,60.0,47.0,15.0,36.0,38.0,36.0,68.0,59.0,12.0,90.0,4.0,93.0,81.0,12.0,34.0,38.0,68.0,86.0,122.0,96.0,121.0,136.0,42.0,122.0,2.0,70.0,165.0,174.0,189.0,282.0,202.0,354.0,338.0
+4.0,7.0,2.0,9.0,4.0,12.0,4.0,16.0,11.0,13.0,11.0,1.0,10.0,1.0,1.0,4.0,0.0,0.0,8.0,6.0,7.0,11.0,13.0,21.0,4.0,11.0,9.0,18.0,0.0,2.0,13.0,13.0,7.0,3.0,5.0,20.0,21.0,17.0,3.0,15.0,14.0,1.0,4.0,15.0,2.0,22.0,6.0,38.0,11.0,3.0,38.0,0.0,5.0,3.0,0.0,11.0,3.0,14.0,2.0,1.0,9.0,5.0,11.0,31.0,9.0,7.0,10.0,24.0,32.0,5.0,17.0,19.0,17.0,21.0,28.0,4.0,10.0,5.0,18.0,55.0,5.0,11.0,13.0,57.0,37.0,33.0,25.0,30.0,76.0,11.0,15.0,16.0,32.0,69.0,103.0,129.0,150.0,13.0,0.0,132.0
+21.0,11.0,6.0,6.0,5.0,7.0,5.0,12.0,19.0,8.0,8.0,25.0,5.0,16.0,6.0,28.0,4.0,13.0,27.0,15.0,14.0,7.0,9.0,26.0,42.0,13.0,12.0,17.0,30.0,18.0,16.0,33.0,37.0,3.0,7.0,26.0,12.0,44.0,12.0,15.0,29.0,15.0,36.0,23.0,0.0,25.0,5.0,41.0,16.0,28.0,16.0,9.0,31.0,7.0,6.0,9.0,15.0,54.0,41.0,20.0,27.0,7.0,42.0,13.0,38.0,0.0,32.0,60.0,42.0,4.0,53.0,13.0,26.0,36.0,30.0,6.0,43.0,9.0,39.0,59.0,29.0,43.0,102.0,67.0,77.0,55.0,99.0,64.0,124.0,37.0,131.0,4.0,160.0,254.0,219.0,36.0,185.0,288.0,7.0,415.0
+4.0,2.0,0.0,6.0,10.0,0.0,2.0,2.0,6.0,2.0,3.0,0.0,3.0,1.0,0.0,1.0,0.0,0.0,2.0,3.0,3.0,2.0,4.0,13.0,2.0,0.0,2.0,3.0,0.0,10.0,1.0,4.0,2.0,1.0,3.0,3.0,14.0,17.0,2.0,7.0,9.0,2.0,3.0,4.0,4.0,20.0,0.0,12.0,2.0,0.0,8.0,1.0,2.0,4.0,0.0,4.0,1.0,4.0,1.0,0.0,6.0,2.0,7.0,14.0,0.0,0.0,3.0,4.0,3.0,8.0,9.0,5.0,16.0,18.0,14.0,5.0,9.0,10.0,5.0,12.0,1.0,10.0,7.0,24.0,15.0,7.0,8.0,7.0,30.0,10.0,9.0,2.0,15.0,15.0,27.0,28.0,72.0,8.0,6.0,41.0
+5.0,6.0,1.0,14.0,14.0,11.0,1.0,6.0,9.0,20.0,12.0,13.0,0.0,9.0,3.0,4.0,17.0,4.0,6.0,5.0,5.0,0.0,18.0,2.0,7.0,6.0,7.0,12.0,0.0,10.0,13.0,8.0,19.0,2.0,13.0,4.0,13.0,0.0,4.0,15.0,14.0,7.0,10.0,7.0,0.0,8.0,20.0,0.0,16.0,7.0,9.0,6.0,8.0,9.0,13.0,6.0,8.0,20.0,9.0,4.0,25.0,7.0,19.0,11.0,11.0,4.0,6.0,26.0,17.0,1.0,10.0,12.0,5.0,26.0,42.0,8.0,34.0,0.0,39.0,50.0,4.0,4.0,46.0,23.0,19.0,58.0,43.0,52.0,29.0,2.0,246.0,93.0,25.0,81.0,98.0,3.0,123.0,64.0,3.0,44.0
+7.0,6.0,0.0,5.0,22.0,8.0,5.0,10.0,8.0,20.0,3.0,1.0,2.0,3.0,1.0,1.0,0.0,0.0,2.0,12.0,11.0,0.0,3.0,24.0,3.0,9.0,7.0,9.0,0.0,13.0,14.0,5.0,0.0,9.0,2.0,4.0,30.0,28.0,1.0,14.0,13.0,1.0,4.0,6.0,16.0,19.0,0.0,56.0,18.0,2.0,23.0,1.0,3.0,3.0,2.0,2.0,13.0,14.0,1.0,0.0,10.0,3.0,10.0,17.0,1.0,1.0,9.0,7.0,33.0,2.0,13.0,35.0,10.0,19.0,11.0,0.0,20.0,4.0,21.0,58.0,4.0,7.0,24.0,42.0,33.0,28.0,4.0,39.0,53.0,10.0,13.0,1.0,20.0,31.0,75.0,45.0,160.0,12.0,4.0,65.0
+8.0,15.0,0.0,4.0,2.0,10.0,12.0,11.0,6.0,37.0,8.0,7.0,15.0,4.0,5.0,18.0,0.0,0.0,10.0,19.0,20.0,50.0,13.0,12.0,13.0,11.0,7.0,12.0,5.0,0.0,32.0,5.0,8.0,6.0,8.0,8.0,12.0,45.0,8.0,14.0,15.0,2.0,13.0,9.0,14.0,4.0,3.0,56.0,15.0,9.0,26.0,0.0,17.0,7.0,10.0,25.0,13.0,19.0,11.0,0.0,16.0,6.0,13.0,35.0,27.0,14.0,14.0,30.0,31.0,10.0,12.0,20.0,31.0,28.0,26.0,6.0,28.0,7.0,20.0,39.0,5.0,30.0,56.0,72.0,41.0,66.0,39.0,37.0,73.0,57.0,36.0,177.0,74.0,77.0,103.0,161.0,127.0,86.0,0.0,287.0
+0.0,0.0,0.0,0.0,5.0,4.0,0.0,0.0,3.0,3.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,2.0,2.0,3.0,0.0,8.0,1.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,5.0,1.0,1.0,0.0,1.0,0.0,27.0,0.0,0.0,0.0,0.0,8.0,0.0,1.0,1.0,0.0,2.0,0.0,2.0,0.0,0.0,1.0,3.0,0.0,5.0,0.0,0.0,0.0,2.0,0.0,0.0,13.0,0.0,0.0,8.0,7.0,0.0,4.0,0.0,15.0,1.0,1.0,0.0,11.0,1.0,7.0,13.0,1.0,24.0,2.0,0.0,2.0,0.0,0.0,2.0,1.0,0.0,1.0,4.0,2.0,1.0
+5.0,3.0,8.0,2.0,2.0,1.0,6.0,6.0,3.0,16.0,8.0,4.0,1.0,7.0,1.0,7.0,2.0,0.0,1.0,7.0,6.0,0.0,7.0,1.0,3.0,3.0,4.0,1.0,2.0,5.0,7.0,1.0,10.0,3.0,11.0,3.0,13.0,1.0,12.0,6.0,2.0,4.0,1.0,6.0,0.0,26.0,18.0,0.0,8.0,5.0,15.0,7.0,11.0,8.0,3.0,2.0,16.0,11.0,6.0,1.0,13.0,0.0,16.0,4.0,2.0,16.0,20.0,10.0,14.0,13.0,15.0,11.0,6.0,11.0,26.0,1.0,26.0,4.0,22.0,25.0,17.0,12.0,7.0,14.0,37.0,24.0,19.0,27.0,38.0,14.0,3.0,25.0,10.0,33.0,70.0,41.0,55.0,75.0,337.0,52.0
+8.0,3.0,2.0,3.0,5.0,13.0,1.0,4.0,10.0,4.0,0.0,12.0,0.0,3.0,0.0,3.0,7.0,4.0,3.0,16.0,2.0,0.0,19.0,0.0,10.0,3.0,0.0,8.0,0.0,5.0,9.0,0.0,7.0,0.0,5.0,0.0,1.0,0.0,4.0,7.0,0.0,10.0,11.0,0.0,0.0,4.0,10.0,0.0,4.0,0.0,5.0,6.0,10.0,1.0,2.0,8.0,22.0,18.0,9.0,4.0,5.0,0.0,0.0,9.0,21.0,13.0,12.0,10.0,18.0,14.0,5.0,3.0,8.0,9.0,4.0,15.0,14.0,2.0,12.0,19.0,31.0,22.0,7.0,8.0,45.0,10.0,24.0,25.0,0.0,25.0,11.0,20.0,19.0,69.0,46.0,16.0,2.0,18.0,222.0,54.0
+0.0,0.0,0.0,0.0,1.0,0.0,1.0,2.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,2.0,6.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,3.0,0.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,0.0,1.0,4.0,0.0,0.0,4.0,3.0,1.0,0.0,0.0,0.0,1.0,2.0,7.0,4.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,3.0,3.0,1.0,1.0,0.0,1.0,0.0,0.0,3.0,3.0,0.0,0.0,5.0,2.0,1.0,0.0,4.0,8.0,4.0,4.0,1.0,1.0,3.0,5.0,1.0,4.0,5.0,5.0,3.0,13.0,6.0,0.0,32.0
+4.0,1.0,5.0,0.0,1.0,0.0,0.0,3.0,0.0,0.0,0.0,1.0,1.0,5.0,5.0,2.0,3.0,1.0,0.0,2.0,1.0,0.0,0.0,0.0,2.0,1.0,2.0,1.0,0.0,4.0,3.0,0.0,4.0,0.0,7.0,0.0,0.0,2.0,1.0,0.0,0.0,2.0,1.0,0.0,0.0,2.0,3.0,1.0,0.0,4.0,1.0,5.0,2.0,0.0,2.0,2.0,1.0,4.0,5.0,2.0,5.0,0.0,0.0,2.0,1.0,1.0,3.0,3.0,0.0,4.0,4.0,1.0,5.0,3.0,9.0,8.0,9.0,3.0,6.0,2.0,4.0,6.0,1.0,5.0,6.0,14.0,4.0,6.0,1.0,2.0,2.0,4.0,15.0,5.0,10.0,19.0,7.0,17.0,9.0,34.0
+5.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,3.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,3.0,2.0,2.0,0.0,0.0,2.0,3.0,2.0,1.0,0.0,3.0,0.0,0.0,3.0,1.0,1.0,2.0,2.0,1.0,5.0,0.0,2.0,4.0,2.0,1.0,5.0,2.0,0.0,0.0,0.0,6.0,0.0,2.0,1.0,4.0,3.0,7.0,6.0,1.0,4.0,6.0,8.0,4.0,5.0,14.0,8.0,8.0,5.0,1.0,26.0
+2.0,2.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,4.0,3.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,3.0,0.0,0.0,2.0,2.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,2.0,1.0,4.0,0.0,2.0,1.0,0.0,2.0,2.0,0.0,2.0,1.0,1.0,0.0,0.0,6.0,4.0,2.0,2.0,2.0,1.0,4.0,2.0,6.0,0.0,3.0,0.0,1.0,4.0,1.0,0.0,2.0,5.0,4.0,8.0,2.0,1.0,7.0,5.0,4.0,3.0,1.0,2.0,1.0,5.0,5.0,7.0,1.0,7.0,7.0,6.0,3.0,2.0,7.0,11.0,1.0,4.0,3.0,9.0,10.0,14.0,27.0,12.0,3.0,44.0
+8.0,2.0,0.0,2.0,1.0,1.0,2.0,7.0,1.0,6.0,3.0,3.0,11.0,0.0,0.0,7.0,0.0,1.0,14.0,3.0,1.0,0.0,0.0,11.0,7.0,20.0,2.0,5.0,0.0,16.0,10.0,0.0,6.0,0.0,2.0,5.0,3.0,6.0,7.0,4.0,0.0,0.0,4.0,2.0,10.0,4.0,0.0,8.0,2.0,1.0,18.0,6.0,9.0,5.0,4.0,3.0,6.0,5.0,6.0,1.0,5.0,1.0,4.0,4.0,14.0,4.0,14.0,9.0,13.0,9.0,11.0,3.0,17.0,13.0,27.0,12.0,20.0,7.0,14.0,13.0,32.0,30.0,21.0,20.0,30.0,23.0,27.0,17.0,23.0,32.0,6.0,8.0,46.0,33.0,39.0,43.0,27.0,25.0,19.0,120.0
+1.0,2.0,0.0,2.0,0.0,0.0,0.0,2.0,3.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,0.0,1.0,1.0,1.0,1.0,0.0,4.0,0.0,1.0,1.0,0.0,1.0,3.0,1.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,1.0,3.0,1.0,0.0,1.0,1.0,1.0,3.0,0.0,0.0,4.0,1.0,0.0,3.0,0.0,0.0,0.0,1.0,2.0,4.0,5.0,5.0,2.0,1.0,3.0,3.0,5.0,2.0,0.0,1.0,4.0,0.0,4.0,0.0,3.0,4.0,3.0,2.0,3.0,9.0,9.0,7.0,7.0,5.0,4.0,3.0,3.0,6.0,8.0,8.0,10.0,7.0,9.0,14.0,23.0,30.0
+29.0,12.0,17.0,19.0,30.0,14.0,13.0,22.0,20.0,12.0,18.0,45.0,38.0,38.0,4.0,27.0,60.0,0.0,36.0,32.0,25.0,8.0,19.0,7.0,33.0,22.0,13.0,19.0,2.0,32.0,36.0,15.0,36.0,8.0,45.0,11.0,15.0,47.0,59.0,40.0,17.0,54.0,51.0,19.0,0.0,9.0,52.0,27.0,16.0,30.0,33.0,78.0,61.0,9.0,25.0,65.0,68.0,38.0,65.0,93.0,67.0,12.0,19.0,21.0,54.0,138.0,97.0,78.0,92.0,61.0,41.0,31.0,92.0,39.0,41.0,83.0,60.0,8.0,40.0,100.0,68.0,63.0,27.0,84.0,131.0,55.0,106.0,36.0,67.0,146.0,28.0,99.0,170.0,210.0,222.0,299.0,177.0,327.0,324.0,619.0
+0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,2.0,1.0,0.0,2.0,1.0,4.0,3.0,0.0,3.0,0.0,5.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,3.0,1.0,0.0,3.0,2.0,3.0,6.0,1.0,0.0,2.0,1.0,2.0,1.0,7.0,0.0,1.0,1.0,2.0,1.0,5.0,0.0,3.0,6.0,0.0,8.0,4.0,10.0,4.0,7.0,5.0,4.0,8.0,3.0,2.0,2.0,2.0,4.0,12.0,15.0,21.0,4.0,1.0,11.0
+0.0,0.0,0.0,7.0,3.0,0.0,1.0,0.0,1.0,1.0,2.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,1.0,0.0,3.0,0.0,0.0,1.0,1.0,3.0,0.0,1.0,3.0,0.0,0.0,1.0,1.0,2.0,3.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,1.0,0.0,5.0,0.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0,1.0,1.0,4.0,0.0,1.0,3.0,1.0,0.0,2.0,7.0,0.0,0.0,1.0,0.0,0.0,2.0,3.0,0.0,1.0,1.0,5.0,2.0,4.0,0.0,1.0,3.0,8.0,3.0,3.0,5.0,2.0,2.0,4.0,3.0,6.0,3.0,7.0,3.0,9.0,1.0,4.0,20.0
+4.0,3.0,18.0,3.0,3.0,4.0,1.0,5.0,1.0,4.0,0.0,2.0,0.0,6.0,0.0,1.0,1.0,14.0,3.0,3.0,3.0,1.0,5.0,1.0,2.0,2.0,3.0,2.0,0.0,3.0,4.0,3.0,13.0,0.0,3.0,0.0,1.0,3.0,7.0,6.0,1.0,6.0,2.0,2.0,0.0,1.0,13.0,1.0,5.0,2.0,0.0,17.0,4.0,0.0,3.0,5.0,9.0,10.0,12.0,30.0,5.0,2.0,2.0,4.0,9.0,3.0,2.0,7.0,8.0,2.0,15.0,8.0,8.0,1.0,10.0,4.0,7.0,4.0,10.0,7.0,18.0,13.0,8.0,10.0,15.0,13.0,27.0,11.0,9.0,5.0,6.0,23.0,20.0,19.0,22.0,44.0,21.0,25.0,61.0,65.0
+10.0,1.0,7.0,2.0,2.0,4.0,0.0,7.0,5.0,3.0,4.0,3.0,2.0,3.0,0.0,11.0,6.0,2.0,4.0,11.0,11.0,0.0,1.0,1.0,9.0,0.0,3.0,6.0,9.0,9.0,1.0,1.0,7.0,1.0,4.0,0.0,3.0,2.0,18.0,12.0,2.0,6.0,19.0,4.0,0.0,4.0,5.0,2.0,5.0,0.0,6.0,1.0,6.0,11.0,5.0,13.0,10.0,12.0,7.0,5.0,17.0,5.0,1.0,8.0,13.0,17.0,10.0,16.0,19.0,11.0,19.0,2.0,22.0,27.0,22.0,8.0,42.0,5.0,26.0,11.0,23.0,20.0,15.0,13.0,17.0,44.0,19.0,24.0,6.0,26.0,10.0,14.0,46.0,51.0,23.0,53.0,22.0,49.0,57.0,177.0
+0.0,3.0,1.0,3.0,0.0,0.0,1.0,4.0,1.0,0.0,3.0,2.0,0.0,14.0,3.0,1.0,10.0,4.0,12.0,5.0,3.0,1.0,3.0,0.0,1.0,2.0,1.0,0.0,0.0,4.0,2.0,1.0,2.0,0.0,8.0,0.0,0.0,8.0,0.0,4.0,0.0,8.0,2.0,3.0,1.0,1.0,8.0,8.0,1.0,6.0,1.0,10.0,12.0,0.0,6.0,11.0,10.0,4.0,9.0,7.0,6.0,2.0,3.0,3.0,4.0,2.0,8.0,2.0,7.0,7.0,6.0,0.0,12.0,6.0,7.0,15.0,11.0,9.0,11.0,8.0,17.0,16.0,7.0,7.0,12.0,11.0,19.0,4.0,8.0,16.0,15.0,35.0,20.0,39.0,24.0,16.0,15.0,22.0,14.0,79.0
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,3.0,2.0,1.0,0.0,2.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,3.0,0.0,1.0,0.0,1.0,0.0,2.0,1.0,0.0,5.0,1.0,0.0,1.0,0.0,0.0,2.0,0.0,1.0,0.0,1.0,4.0,0.0,0.0,0.0,3.0,1.0,1.0,4.0,1.0,0.0,0.0,0.0,1.0,1.0,3.0,2.0,0.0,2.0,1.0,1.0,6.0,2.0,5.0,2.0,3.0,1.0,2.0,2.0,5.0,3.0,2.0,3.0,1.0,5.0,6.0,6.0,3.0,12.0,5.0,6.0,2.0,2.0,6.0,7.0,8.0,5.0,7.0,9.0,9.0,15.0
+6.0,0.0,5.0,4.0,0.0,1.0,2.0,8.0,13.0,3.0,3.0,4.0,0.0,5.0,0.0,8.0,6.0,0.0,7.0,12.0,5.0,0.0,4.0,1.0,5.0,0.0,2.0,2.0,10.0,4.0,12.0,0.0,10.0,0.0,11.0,0.0,1.0,0.0,7.0,2.0,1.0,8.0,10.0,0.0,4.0,7.0,2.0,0.0,11.0,1.0,5.0,3.0,11.0,3.0,21.0,10.0,8.0,8.0,9.0,1.0,18.0,3.0,1.0,8.0,27.0,11.0,24.0,18.0,3.0,17.0,2.0,5.0,19.0,15.0,17.0,6.0,18.0,0.0,19.0,23.0,27.0,59.0,1.0,11.0,25.0,32.0,25.0,26.0,3.0,23.0,5.0,5.0,36.0,21.0,25.0,23.0,8.0,52.0,100.0,193.0
+4.0,1.0,1.0,1.0,2.0,2.0,0.0,5.0,6.0,2.0,0.0,6.0,4.0,14.0,2.0,6.0,6.0,0.0,2.0,4.0,4.0,1.0,2.0,0.0,3.0,2.0,0.0,0.0,1.0,7.0,7.0,0.0,3.0,0.0,2.0,1.0,4.0,4.0,5.0,3.0,1.0,4.0,3.0,3.0,8.0,0.0,8.0,1.0,9.0,6.0,7.0,3.0,10.0,4.0,11.0,3.0,4.0,9.0,6.0,2.0,9.0,1.0,0.0,3.0,8.0,4.0,7.0,5.0,6.0,4.0,8.0,4.0,13.0,4.0,8.0,4.0,8.0,2.0,10.0,6.0,4.0,8.0,3.0,9.0,11.0,19.0,17.0,11.0,8.0,13.0,14.0,37.0,22.0,27.0,20.0,41.0,30.0,25.0,25.0,103.0
+1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,4.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,0.0,1.0,4.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,2.0,1.0,0.0,2.0,0.0,0.0,1.0,4.0,4.0,0.0,1.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,3.0,0.0,0.0,3.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,4.0,0.0,2.0,0.0,0.0,1.0,4.0,0.0,3.0,2.0,1.0,0.0,1.0,2.0,2.0,0.0,6.0,1.0,2.0,3.0,8.0,9.0,5.0,1.0,8.0,4.0,10.0,0.0,0.0,1.0,2.0,23.0,3.0,28.0,5.0,0.0,46.0
+1.0,1.0,1.0,0.0,0.0,1.0,1.0,4.0,2.0,2.0,1.0,3.0,0.0,4.0,0.0,4.0,5.0,13.0,5.0,1.0,1.0,0.0,1.0,0.0,0.0,2.0,1.0,1.0,5.0,0.0,3.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,2.0,1.0,1.0,2.0,0.0,3.0,2.0,4.0,0.0,0.0,2.0,3.0,0.0,2.0,7.0,8.0,5.0,1.0,0.0,0.0,2.0,2.0,1.0,3.0,2.0,1.0,4.0,9.0,2.0,0.0,3.0,4.0,2.0,2.0,9.0,6.0,0.0,1.0,3.0,9.0,3.0,0.0,8.0,11.0,8.0,7.0,1.0,5.0,13.0,1.0,0.0,6.0,11.0,20.0,24.0,8.0,12.0,63.0,54.0
+4.0,13.0,0.0,9.0,6.0,13.0,6.0,1.0,13.0,18.0,8.0,2.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,1.0,4.0,3.0,10.0,11.0,0.0,6.0,12.0,11.0,0.0,1.0,3.0,22.0,3.0,1.0,1.0,14.0,12.0,1.0,1.0,28.0,13.0,0.0,3.0,22.0,0.0,13.0,0.0,0.0,5.0,0.0,9.0,0.0,2.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,5.0,8.0,15.0,22.0,3.0,1.0,1.0,11.0,9.0,2.0,10.0,31.0,7.0,7.0,6.0,0.0,10.0,1.0,12.0,10.0,0.0,2.0,7.0,5.0,11.0,21.0,1.0,8.0,46.0,0.0,14.0,1.0,10.0,37.0,21.0,2.0,109.0,23.0,1.0,14.0
+2.0,6.0,0.0,1.0,19.0,6.0,0.0,5.0,5.0,6.0,4.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,2.0,2.0,1.0,8.0,2.0,3.0,1.0,5.0,2.0,0.0,14.0,3.0,5.0,2.0,8.0,0.0,1.0,14.0,11.0,1.0,8.0,2.0,1.0,3.0,6.0,15.0,13.0,0.0,18.0,7.0,0.0,2.0,0.0,0.0,2.0,1.0,3.0,4.0,1.0,0.0,0.0,6.0,4.0,1.0,8.0,2.0,0.0,6.0,7.0,7.0,0.0,18.0,18.0,5.0,5.0,12.0,0.0,17.0,2.0,13.0,23.0,0.0,1.0,3.0,8.0,33.0,22.0,7.0,26.0,15.0,8.0,1.0,1.0,12.0,14.0,16.0,15.0,64.0,5.0,1.0,29.0
+25.0,7.0,10.0,10.0,10.0,7.0,4.0,11.0,7.0,7.0,22.0,13.0,25.0,24.0,1.0,16.0,1.0,0.0,9.0,10.0,29.0,11.0,6.0,12.0,9.0,4.0,9.0,15.0,55.0,23.0,20.0,0.0,21.0,0.0,18.0,0.0,6.0,11.0,17.0,13.0,7.0,20.0,19.0,1.0,3.0,13.0,21.0,15.0,8.0,5.0,19.0,15.0,17.0,7.0,22.0,51.0,14.0,27.0,19.0,7.0,26.0,13.0,4.0,34.0,31.0,18.0,32.0,25.0,18.0,67.0,41.0,22.0,37.0,35.0,45.0,42.0,54.0,4.0,66.0,20.0,70.0,48.0,9.0,45.0,36.0,56.0,66.0,48.0,20.0,49.0,24.0,75.0,65.0,112.0,126.0,111.0,34.0,118.0,325.0,451.0
+20.0,7.0,31.0,7.0,6.0,5.0,16.0,15.0,14.0,20.0,15.0,21.0,0.0,17.0,0.0,7.0,17.0,0.0,30.0,17.0,15.0,0.0,12.0,0.0,11.0,7.0,10.0,7.0,4.0,16.0,12.0,0.0,18.0,3.0,26.0,1.0,6.0,4.0,36.0,11.0,0.0,51.0,21.0,1.0,0.0,14.0,27.0,0.0,14.0,7.0,14.0,20.0,19.0,3.0,13.0,45.0,24.0,47.0,36.0,51.0,39.0,8.0,5.0,30.0,29.0,20.0,32.0,43.0,29.0,16.0,34.0,15.0,46.0,19.0,31.0,94.0,40.0,1.0,40.0,45.0,73.0,36.0,40.0,42.0,60.0,90.0,49.0,23.0,7.0,45.0,11.0,76.0,71.0,93.0,137.0,114.0,36.0,178.0,487.0,248.0
+34.0,17.0,36.0,9.0,14.0,9.0,8.0,21.0,13.0,6.0,7.0,32.0,16.0,25.0,7.0,37.0,48.0,7.0,32.0,27.0,37.0,3.0,19.0,2.0,27.0,4.0,16.0,15.0,1.0,13.0,32.0,1.0,53.0,9.0,32.0,2.0,8.0,12.0,32.0,17.0,7.0,66.0,61.0,2.0,0.0,14.0,66.0,2.0,4.0,8.0,23.0,90.0,75.0,16.0,53.0,52.0,65.0,45.0,97.0,102.0,80.0,11.0,10.0,25.0,66.0,46.0,113.0,70.0,49.0,65.0,22.0,26.0,60.0,16.0,37.0,152.0,33.0,1.0,27.0,69.0,128.0,71.0,75.0,54.0,124.0,76.0,87.0,43.0,18.0,159.0,22.0,57.0,129.0,114.0,195.0,168.0,55.0,203.0,351.0,856.0
+10.0,4.0,0.0,3.0,11.0,8.0,7.0,9.0,4.0,5.0,11.0,14.0,13.0,7.0,1.0,11.0,6.0,0.0,2.0,9.0,25.0,0.0,11.0,2.0,8.0,11.0,6.0,3.0,10.0,5.0,12.0,5.0,14.0,2.0,3.0,9.0,11.0,21.0,14.0,6.0,10.0,7.0,15.0,10.0,18.0,22.0,7.0,11.0,12.0,2.0,7.0,13.0,27.0,7.0,12.0,25.0,17.0,22.0,20.0,0.0,22.0,10.0,9.0,22.0,19.0,23.0,24.0,24.0,21.0,34.0,31.0,19.0,47.0,10.0,23.0,31.0,37.0,3.0,40.0,38.0,26.0,38.0,8.0,61.0,67.0,44.0,53.0,32.0,33.0,58.0,48.0,87.0,57.0,109.0,83.0,123.0,111.0,160.0,24.0,424.0
+19.0,8.0,15.0,13.0,14.0,10.0,25.0,31.0,5.0,2.0,5.0,34.0,0.0,36.0,18.0,32.0,58.0,21.0,43.0,27.0,26.0,1.0,23.0,4.0,14.0,4.0,8.0,11.0,84.0,27.0,32.0,4.0,42.0,5.0,27.0,0.0,2.0,17.0,16.0,25.0,2.0,46.0,47.0,3.0,0.0,2.0,51.0,8.0,6.0,22.0,10.0,52.0,43.0,5.0,47.0,73.0,58.0,37.0,62.0,35.0,45.0,7.0,2.0,29.0,75.0,23.0,57.0,37.0,56.0,40.0,49.0,37.0,62.0,44.0,50.0,139.0,70.0,0.0,41.0,54.0,106.0,120.0,27.0,30.0,92.0,87.0,104.0,72.0,12.0,112.0,20.0,112.0,140.0,157.0,163.0,153.0,33.0,249.0,486.0,558.0
+16.0,17.0,33.0,11.0,24.0,9.0,16.0,15.0,11.0,2.0,13.0,35.0,3.0,34.0,7.0,25.0,34.0,2.0,27.0,26.0,18.0,1.0,18.0,1.0,34.0,6.0,12.0,9.0,58.0,15.0,32.0,0.0,52.0,0.0,12.0,5.0,4.0,8.0,18.0,25.0,8.0,39.0,42.0,3.0,0.0,6.0,20.0,1.0,35.0,21.0,6.0,13.0,35.0,11.0,67.0,54.0,49.0,48.0,24.0,65.0,51.0,7.0,8.0,26.0,58.0,54.0,62.0,31.0,34.0,28.0,22.0,20.0,55.0,20.0,34.0,9.0,50.0,2.0,30.0,82.0,77.0,156.0,11.0,47.0,120.0,48.0,53.0,45.0,10.0,104.0,37.0,75.0,151.0,123.0,158.0,169.0,47.0,109.0,922.0,693.0
+25.0,15.0,17.0,13.0,4.0,7.0,22.0,33.0,12.0,30.0,27.0,28.0,1.0,22.0,10.0,26.0,15.0,55.0,21.0,45.0,17.0,9.0,9.0,12.0,22.0,3.0,8.0,20.0,5.0,55.0,42.0,1.0,50.0,6.0,43.0,2.0,9.0,26.0,28.0,20.0,4.0,48.0,33.0,7.0,65.0,8.0,33.0,7.0,30.0,47.0,46.0,52.0,51.0,9.0,96.0,62.0,78.0,60.0,49.0,28.0,58.0,16.0,4.0,49.0,56.0,16.0,82.0,40.0,41.0,65.0,76.0,13.0,52.0,40.0,52.0,72.0,88.0,7.0,63.0,69.0,120.0,165.0,39.0,54.0,93.0,122.0,174.0,62.0,21.0,142.0,23.0,132.0,137.0,87.0,167.0,232.0,42.0,318.0,1076.0,750.0
+11.0,11.0,30.0,8.0,12.0,8.0,11.0,13.0,11.0,20.0,11.0,24.0,1.0,33.0,2.0,22.0,13.0,2.0,20.0,12.0,33.0,2.0,22.0,2.0,8.0,8.0,11.0,8.0,2.0,13.0,18.0,6.0,27.0,2.0,24.0,6.0,9.0,0.0,26.0,11.0,4.0,6.0,12.0,4.0,1.0,11.0,64.0,1.0,6.0,16.0,13.0,19.0,22.0,8.0,15.0,19.0,19.0,62.0,17.0,27.0,22.0,11.0,5.0,15.0,10.0,4.0,20.0,48.0,26.0,16.0,28.0,17.0,15.0,33.0,43.0,113.0,61.0,0.0,45.0,40.0,30.0,26.0,36.0,29.0,38.0,59.0,67.0,57.0,24.0,20.0,210.0,17.0,39.0,49.0,216.0,24.0,55.0,103.0,58.0,113.0
+19.0,7.0,6.0,2.0,9.0,8.0,8.0,18.0,12.0,15.0,17.0,16.0,67.0,16.0,4.0,23.0,6.0,0.0,17.0,20.0,17.0,4.0,6.0,14.0,20.0,8.0,5.0,12.0,9.0,16.0,20.0,0.0,16.0,0.0,11.0,5.0,6.0,14.0,15.0,4.0,3.0,14.0,28.0,1.0,7.0,9.0,14.0,8.0,11.0,7.0,18.0,22.0,20.0,15.0,38.0,29.0,27.0,34.0,24.0,0.0,25.0,4.0,12.0,21.0,23.0,31.0,44.0,32.0,21.0,22.0,26.0,8.0,44.0,25.0,46.0,27.0,34.0,4.0,57.0,40.0,31.0,52.0,11.0,50.0,48.0,43.0,71.0,84.0,17.0,74.0,36.0,80.0,125.0,142.0,87.0,113.0,48.0,127.0,282.0,401.0
+15.0,13.0,48.0,8.0,13.0,10.0,7.0,28.0,17.0,22.0,21.0,16.0,11.0,36.0,2.0,56.0,27.0,1.0,39.0,66.0,36.0,4.0,14.0,9.0,34.0,6.0,14.0,23.0,50.0,22.0,40.0,5.0,33.0,1.0,19.0,8.0,24.0,15.0,43.0,28.0,9.0,54.0,40.0,11.0,0.0,19.0,50.0,3.0,18.0,4.0,50.0,56.0,50.0,55.0,29.0,55.0,72.0,73.0,46.0,12.0,52.0,14.0,14.0,35.0,48.0,78.0,51.0,72.0,49.0,50.0,54.0,31.0,71.0,59.0,64.0,63.0,81.0,1.0,63.0,76.0,122.0,65.0,119.0,66.0,95.0,129.0,90.0,120.0,32.0,139.0,70.0,74.0,181.0,200.0,193.0,205.0,112.0,170.0,320.0,691.0
+28.0,12.0,2.0,13.0,14.0,14.0,7.0,17.0,7.0,14.0,28.0,25.0,29.0,8.0,8.0,17.0,5.0,10.0,17.0,17.0,28.0,8.0,15.0,12.0,12.0,12.0,20.0,32.0,0.0,17.0,21.0,25.0,12.0,3.0,6.0,19.0,19.0,57.0,22.0,23.0,15.0,29.0,33.0,18.0,6.0,9.0,23.0,46.0,29.0,7.0,10.0,13.0,19.0,19.0,12.0,31.0,26.0,20.0,25.0,0.0,25.0,29.0,23.0,38.0,26.0,24.0,33.0,32.0,61.0,29.0,64.0,31.0,75.0,74.0,52.0,37.0,75.0,5.0,45.0,80.0,53.0,69.0,12.0,123.0,80.0,69.0,78.0,71.0,121.0,88.0,89.0,163.0,65.0,144.0,132.0,247.0,208.0,323.0,35.0,573.0
+20.0,4.0,15.0,7.0,17.0,10.0,6.0,17.0,14.0,20.0,10.0,27.0,10.0,33.0,1.0,25.0,11.0,0.0,24.0,23.0,34.0,6.0,11.0,17.0,12.0,36.0,10.0,23.0,19.0,6.0,20.0,6.0,31.0,7.0,16.0,16.0,16.0,14.0,24.0,35.0,15.0,15.0,21.0,8.0,0.0,28.0,34.0,21.0,12.0,3.0,42.0,6.0,40.0,17.0,17.0,27.0,27.0,50.0,49.0,11.0,34.0,8.0,13.0,30.0,17.0,30.0,31.0,49.0,48.0,13.0,23.0,9.0,53.0,23.0,40.0,21.0,49.0,10.0,43.0,95.0,40.0,36.0,73.0,65.0,101.0,83.0,58.0,41.0,53.0,46.0,117.0,150.0,112.0,162.0,160.0,176.0,105.0,198.0,104.0,413.0
+28.0,14.0,6.0,10.0,19.0,11.0,6.0,22.0,32.0,9.0,18.0,28.0,0.0,34.0,18.0,17.0,28.0,0.0,17.0,28.0,14.0,5.0,22.0,20.0,21.0,9.0,17.0,25.0,0.0,23.0,35.0,9.0,26.0,5.0,69.0,7.0,21.0,26.0,51.0,30.0,15.0,18.0,42.0,11.0,0.0,16.0,72.0,25.0,12.0,22.0,27.0,37.0,51.0,4.0,17.0,19.0,54.0,19.0,45.0,4.0,35.0,11.0,16.0,26.0,46.0,49.0,57.0,37.0,44.0,50.0,72.0,15.0,58.0,40.0,70.0,31.0,84.0,3.0,63.0,50.0,50.0,54.0,26.0,80.0,92.0,86.0,101.0,79.0,58.0,75.0,21.0,195.0,161.0,233.0,169.0,151.0,122.0,278.0,260.0,373.0
+18.0,9.0,5.0,8.0,6.0,9.0,4.0,6.0,8.0,3.0,6.0,11.0,6.0,4.0,8.0,4.0,1.0,5.0,12.0,13.0,10.0,3.0,14.0,7.0,16.0,11.0,6.0,5.0,0.0,20.0,15.0,1.0,14.0,5.0,28.0,5.0,10.0,16.0,10.0,13.0,3.0,18.0,37.0,2.0,0.0,6.0,21.0,9.0,13.0,61.0,7.0,28.0,37.0,0.0,39.0,14.0,28.0,16.0,33.0,0.0,21.0,6.0,2.0,30.0,24.0,12.0,19.0,33.0,29.0,23.0,55.0,20.0,31.0,49.0,68.0,45.0,59.0,6.0,48.0,26.0,66.0,29.0,3.0,38.0,61.0,101.0,86.0,45.0,19.0,66.0,21.0,41.0,85.0,135.0,85.0,108.0,46.0,91.0,180.0,220.0
+12.0,15.0,33.0,12.0,9.0,9.0,4.0,20.0,17.0,1.0,7.0,30.0,0.0,15.0,9.0,27.0,15.0,0.0,27.0,18.0,18.0,5.0,11.0,7.0,21.0,1.0,4.0,6.0,17.0,13.0,26.0,1.0,20.0,5.0,30.0,1.0,3.0,3.0,21.0,12.0,1.0,25.0,40.0,6.0,63.0,0.0,7.0,1.0,9.0,22.0,9.0,49.0,30.0,8.0,54.0,22.0,53.0,47.0,51.0,75.0,30.0,3.0,3.0,18.0,67.0,13.0,33.0,34.0,29.0,26.0,17.0,11.0,32.0,19.0,18.0,22.0,40.0,3.0,25.0,55.0,50.0,122.0,84.0,17.0,95.0,70.0,94.0,18.0,14.0,93.0,5.0,34.0,165.0,69.0,98.0,146.0,25.0,157.0,491.0,611.0
+10.0,5.0,35.0,7.0,8.0,8.0,5.0,16.0,5.0,0.0,3.0,13.0,0.0,18.0,0.0,8.0,11.0,1.0,14.0,21.0,7.0,1.0,11.0,4.0,12.0,1.0,5.0,2.0,23.0,6.0,21.0,0.0,19.0,4.0,13.0,0.0,1.0,1.0,7.0,7.0,0.0,29.0,13.0,0.0,64.0,1.0,21.0,3.0,24.0,2.0,0.0,4.0,20.0,5.0,24.0,39.0,28.0,45.0,20.0,5.0,30.0,6.0,0.0,7.0,12.0,17.0,19.0,12.0,31.0,15.0,17.0,14.0,23.0,11.0,18.0,19.0,36.0,1.0,17.0,35.0,34.0,57.0,12.0,24.0,58.0,27.0,39.0,24.0,1.0,34.0,9.0,58.0,45.0,26.0,70.0,63.0,8.0,79.0,671.0,334.0
+20.0,4.0,1.0,17.0,16.0,14.0,6.0,15.0,8.0,10.0,17.0,14.0,0.0,25.0,6.0,17.0,12.0,11.0,10.0,12.0,16.0,0.0,13.0,13.0,33.0,5.0,7.0,13.0,0.0,25.0,23.0,9.0,28.0,1.0,34.0,12.0,17.0,48.0,30.0,19.0,18.0,16.0,15.0,10.0,0.0,5.0,24.0,24.0,21.0,26.0,10.0,26.0,44.0,3.0,27.0,33.0,32.0,20.0,37.0,0.0,36.0,4.0,17.0,26.0,60.0,19.0,19.0,36.0,30.0,52.0,64.0,12.0,35.0,49.0,72.0,29.0,62.0,13.0,55.0,35.0,70.0,78.0,5.0,65.0,53.0,95.0,122.0,63.0,64.0,97.0,40.0,155.0,125.0,172.0,109.0,201.0,154.0,243.0,149.0,475.0
+16.0,2.0,16.0,2.0,4.0,6.0,8.0,14.0,5.0,5.0,14.0,25.0,11.0,32.0,1.0,13.0,26.0,0.0,37.0,16.0,17.0,12.0,4.0,0.0,14.0,3.0,4.0,6.0,22.0,18.0,12.0,0.0,15.0,6.0,33.0,2.0,14.0,1.0,30.0,8.0,0.0,40.0,26.0,0.0,10.0,2.0,31.0,1.0,2.0,4.0,12.0,52.0,42.0,7.0,42.0,29.0,32.0,31.0,28.0,26.0,43.0,4.0,0.0,25.0,29.0,23.0,21.0,36.0,26.0,36.0,24.0,18.0,44.0,18.0,27.0,76.0,36.0,2.0,34.0,32.0,82.0,51.0,30.0,27.0,39.0,31.0,70.0,37.0,5.0,60.0,29.0,54.0,80.0,80.0,87.0,115.0,22.0,127.0,398.0,391.0
+2.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,3.0,0.0,0.0,0.0,4.0,0.0,1.0,0.0,4.0,0.0,1.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,1.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,4.0,5.0,0.0,0.0,0.0,0.0,3.0,3.0,1.0,3.0,2.0,3.0,7.0,0.0,1.0,5.0,5.0,6.0,2.0,3.0,1.0,4.0,1.0,3.0,1.0,2.0,0.0,6.0,0.0,6.0,6.0,0.0,10.0
+5.0,19.0,1.0,20.0,4.0,11.0,21.0,6.0,27.0,11.0,14.0,9.0,0.0,4.0,6.0,13.0,11.0,0.0,5.0,6.0,9.0,8.0,14.0,10.0,15.0,1.0,11.0,12.0,0.0,12.0,13.0,7.0,18.0,0.0,24.0,5.0,9.0,1.0,23.0,26.0,14.0,0.0,19.0,10.0,1.0,5.0,12.0,0.0,11.0,9.0,12.0,3.0,16.0,8.0,13.0,1.0,8.0,6.0,13.0,0.0,13.0,18.0,12.0,29.0,14.0,0.0,2.0,18.0,27.0,0.0,13.0,21.0,6.0,22.0,12.0,0.0,31.0,1.0,15.0,28.0,4.0,9.0,28.0,56.0,31.0,29.0,139.0,33.0,35.0,0.0,15.0,33.0,76.0,27.0,62.0,2.0,67.0,38.0,44.0,58.0
+11.0,32.0,0.0,9.0,7.0,24.0,14.0,27.0,20.0,9.0,12.0,3.0,0.0,9.0,2.0,10.0,2.0,0.0,16.0,13.0,14.0,1.0,24.0,11.0,16.0,2.0,21.0,30.0,26.0,11.0,15.0,23.0,22.0,4.0,3.0,34.0,28.0,20.0,13.0,34.0,19.0,3.0,22.0,25.0,0.0,22.0,0.0,25.0,14.0,22.0,44.0,6.0,2.0,12.0,12.0,2.0,7.0,10.0,8.0,0.0,14.0,15.0,28.0,45.0,16.0,2.0,17.0,71.0,35.0,9.0,20.0,57.0,21.0,22.0,18.0,3.0,30.0,176.0,32.0,58.0,4.0,16.0,7.0,63.0,64.0,54.0,14.0,36.0,100.0,24.0,135.0,3.0,82.0,97.0,116.0,7.0,217.0,93.0,2.0,273.0
+5.0,8.0,0.0,7.0,9.0,11.0,5.0,3.0,20.0,15.0,17.0,8.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,1.0,10.0,0.0,12.0,4.0,10.0,4.0,13.0,14.0,0.0,12.0,8.0,1.0,2.0,1.0,6.0,2.0,29.0,1.0,11.0,11.0,5.0,1.0,5.0,2.0,0.0,18.0,0.0,0.0,9.0,0.0,26.0,4.0,9.0,40.0,10.0,1.0,5.0,4.0,5.0,0.0,13.0,9.0,3.0,12.0,14.0,0.0,6.0,24.0,27.0,1.0,19.0,13.0,9.0,10.0,14.0,1.0,8.0,2.0,23.0,27.0,5.0,1.0,52.0,18.0,50.0,14.0,20.0,25.0,8.0,9.0,45.0,1.0,28.0,40.0,42.0,1.0,25.0,40.0,3.0,43.0
+22.0,9.0,0.0,3.0,10.0,6.0,15.0,2.0,4.0,20.0,9.0,15.0,0.0,0.0,1.0,11.0,4.0,0.0,10.0,5.0,5.0,0.0,5.0,0.0,35.0,13.0,9.0,4.0,0.0,9.0,15.0,0.0,4.0,0.0,1.0,0.0,17.0,0.0,4.0,12.0,2.0,0.0,13.0,2.0,0.0,15.0,0.0,0.0,17.0,6.0,28.0,3.0,8.0,2.0,2.0,1.0,1.0,5.0,2.0,0.0,8.0,11.0,0.0,13.0,5.0,0.0,4.0,8.0,27.0,0.0,20.0,16.0,1.0,9.0,16.0,3.0,19.0,5.0,16.0,14.0,5.0,3.0,51.0,42.0,41.0,23.0,12.0,17.0,2.0,2.0,7.0,1.0,66.0,68.0,79.0,3.0,14.0,41.0,2.0,74.0
+15.0,14.0,0.0,14.0,22.0,11.0,3.0,2.0,19.0,7.0,9.0,9.0,1.0,2.0,1.0,9.0,3.0,1.0,4.0,5.0,19.0,11.0,7.0,0.0,11.0,7.0,11.0,12.0,0.0,8.0,10.0,3.0,12.0,1.0,5.0,3.0,8.0,0.0,14.0,11.0,8.0,0.0,8.0,9.0,0.0,14.0,1.0,1.0,18.0,0.0,11.0,1.0,8.0,43.0,11.0,3.0,0.0,12.0,7.0,0.0,12.0,11.0,7.0,25.0,10.0,0.0,10.0,18.0,11.0,0.0,11.0,12.0,10.0,13.0,10.0,2.0,18.0,3.0,20.0,25.0,0.0,7.0,28.0,34.0,43.0,11.0,18.0,18.0,20.0,7.0,79.0,0.0,41.0,28.0,35.0,0.0,42.0,50.0,1.0,48.0
+7.0,3.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,1.0,0.0,3.0,2.0,0.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,4.0,0.0,1.0,1.0,0.0,4.0,0.0,1.0,0.0,0.0,2.0,1.0,1.0,4.0,1.0,0.0,2.0,0.0,0.0,5.0,0.0,0.0,2.0,1.0,9.0,3.0,0.0,2.0,2.0,3.0,0.0,5.0,2.0,2.0,1.0,6.0,0.0,0.0,7.0,11.0,0.0,3.0,1.0,4.0,3.0,4.0,1.0,7.0,0.0,3.0,8.0,0.0,0.0,8.0,5.0,14.0,5.0,5.0,10.0,20.0,0.0,14.0,0.0,5.0,9.0,17.0,0.0,18.0,9.0,0.0,9.0
+2.0,0.0,0.0,0.0,3.0,6.0,4.0,0.0,2.0,2.0,0.0,0.0,4.0,0.0,0.0,6.0,2.0,0.0,0.0,3.0,0.0,0.0,1.0,0.0,1.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,3.0,1.0,0.0,1.0,7.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,4.0,1.0,3.0,0.0,2.0,6.0,6.0,0.0,1.0,1.0,1.0,0.0,1.0,2.0,1.0,3.0,2.0,0.0,0.0,3.0,6.0,2.0,3.0,2.0,1.0,4.0,2.0,0.0,3.0,1.0,2.0,9.0,0.0,3.0,3.0,19.0,9.0,1.0,2.0,6.0,9.0,0.0,7.0,0.0,6.0,7.0,9.0,1.0,13.0,5.0,0.0,13.0
+0.0,3.0,0.0,4.0,2.0,2.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,4.0,0.0,3.0,6.0,1.0,0.0,0.0,2.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,2.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,5.0,0.0,1.0,3.0,1.0,0.0,1.0,0.0,1.0,1.0,2.0,1.0,1.0,4.0,4.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,6.0,9.0,0.0,9.0,0.0,4.0,3.0,0.0,0.0,16.0,0.0,4.0,27.0,0.0,28.0,2.0,0.0,0.0,0.0,3.0,3.0,13.0,1.0,0.0,16.0,0.0,0.0
+7.0,17.0,0.0,28.0,5.0,15.0,8.0,6.0,24.0,6.0,14.0,7.0,0.0,1.0,2.0,8.0,11.0,2.0,5.0,9.0,13.0,8.0,15.0,2.0,16.0,11.0,13.0,25.0,0.0,24.0,9.0,4.0,5.0,2.0,1.0,1.0,6.0,0.0,18.0,42.0,6.0,0.0,9.0,5.0,0.0,4.0,0.0,4.0,2.0,0.0,12.0,1.0,2.0,5.0,3.0,0.0,5.0,5.0,1.0,3.0,9.0,4.0,2.0,28.0,12.0,0.0,3.0,11.0,8.0,1.0,45.0,12.0,3.0,86.0,92.0,0.0,67.0,0.0,31.0,23.0,1.0,0.0,79.0,17.0,43.0,128.0,10.0,186.0,30.0,4.0,2.0,3.0,18.0,28.0,86.0,1.0,34.0,14.0,0.0,45.0
+2.0,10.0,0.0,10.0,14.0,12.0,4.0,3.0,10.0,6.0,2.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,6.0,0.0,1.0,14.0,2.0,2.0,10.0,5.0,13.0,0.0,8.0,0.0,0.0,1.0,0.0,4.0,2.0,8.0,0.0,3.0,21.0,1.0,0.0,4.0,6.0,12.0,11.0,1.0,5.0,1.0,2.0,5.0,1.0,1.0,3.0,3.0,1.0,3.0,1.0,1.0,1.0,2.0,1.0,7.0,21.0,4.0,0.0,1.0,3.0,1.0,1.0,45.0,13.0,1.0,64.0,57.0,0.0,43.0,0.0,14.0,11.0,2.0,4.0,85.0,4.0,14.0,80.0,11.0,134.0,13.0,0.0,0.0,0.0,7.0,23.0,25.0,0.0,42.0,5.0,0.0,10.0
+4.0,16.0,0.0,14.0,14.0,11.0,3.0,2.0,4.0,2.0,5.0,5.0,0.0,0.0,2.0,4.0,10.0,0.0,4.0,6.0,5.0,4.0,11.0,2.0,11.0,13.0,12.0,15.0,0.0,6.0,8.0,0.0,9.0,0.0,6.0,1.0,6.0,0.0,13.0,19.0,0.0,1.0,9.0,1.0,0.0,4.0,2.0,0.0,1.0,2.0,11.0,3.0,8.0,7.0,3.0,1.0,9.0,8.0,2.0,0.0,4.0,1.0,1.0,21.0,6.0,0.0,1.0,8.0,5.0,0.0,20.0,10.0,2.0,63.0,57.0,0.0,44.0,1.0,20.0,7.0,5.0,1.0,55.0,7.0,28.0,105.0,13.0,141.0,4.0,2.0,3.0,1.0,15.0,23.0,50.0,1.0,13.0,8.0,0.0,9.0
+0.0,6.0,0.0,4.0,4.0,9.0,2.0,2.0,10.0,3.0,7.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,0.0,1.0,4.0,7.0,4.0,5.0,3.0,5.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,12.0,3.0,6.0,7.0,6.0,0.0,0.0,2.0,1.0,4.0,0.0,7.0,3.0,1.0,3.0,0.0,1.0,1.0,0.0,0.0,4.0,2.0,1.0,1.0,2.0,4.0,3.0,14.0,2.0,0.0,0.0,2.0,1.0,0.0,7.0,2.0,1.0,27.0,19.0,0.0,17.0,0.0,5.0,6.0,0.0,0.0,24.0,6.0,11.0,39.0,4.0,58.0,11.0,1.0,0.0,0.0,5.0,6.0,20.0,0.0,35.0,4.0,0.0,7.0
+5.0,11.0,0.0,15.0,5.0,6.0,3.0,2.0,11.0,6.0,9.0,1.0,0.0,2.0,0.0,4.0,0.0,0.0,3.0,6.0,1.0,3.0,7.0,20.0,8.0,12.0,14.0,16.0,0.0,10.0,4.0,1.0,2.0,2.0,2.0,7.0,9.0,8.0,2.0,21.0,11.0,1.0,3.0,15.0,0.0,17.0,1.0,8.0,7.0,4.0,19.0,1.0,7.0,6.0,4.0,0.0,3.0,1.0,2.0,5.0,3.0,1.0,9.0,27.0,15.0,0.0,1.0,10.0,6.0,0.0,36.0,11.0,3.0,94.0,54.0,0.0,45.0,0.0,15.0,11.0,1.0,3.0,80.0,7.0,13.0,102.0,8.0,169.0,41.0,1.0,0.0,1.0,14.0,16.0,51.0,0.0,95.0,6.0,0.0,14.0
+15.0,16.0,0.0,11.0,9.0,12.0,1.0,1.0,13.0,8.0,4.0,4.0,1.0,1.0,1.0,2.0,4.0,1.0,3.0,4.0,6.0,9.0,18.0,3.0,7.0,15.0,9.0,17.0,0.0,3.0,3.0,0.0,6.0,4.0,6.0,1.0,9.0,0.0,6.0,20.0,2.0,0.0,4.0,3.0,0.0,9.0,1.0,1.0,2.0,1.0,12.0,3.0,3.0,6.0,2.0,0.0,3.0,2.0,3.0,1.0,4.0,1.0,4.0,22.0,13.0,0.0,3.0,7.0,11.0,0.0,30.0,15.0,2.0,76.0,51.0,0.0,48.0,1.0,14.0,15.0,2.0,0.0,66.0,18.0,28.0,81.0,8.0,127.0,9.0,3.0,3.0,1.0,16.0,13.0,56.0,1.0,31.0,12.0,0.0,17.0
+6.0,16.0,0.0,34.0,9.0,25.0,18.0,4.0,20.0,11.0,10.0,2.0,0.0,3.0,3.0,4.0,4.0,3.0,0.0,9.0,0.0,9.0,16.0,3.0,9.0,14.0,11.0,32.0,0.0,9.0,3.0,4.0,0.0,2.0,1.0,1.0,12.0,0.0,13.0,47.0,6.0,0.0,7.0,22.0,3.0,2.0,2.0,1.0,6.0,3.0,7.0,1.0,1.0,5.0,3.0,0.0,2.0,3.0,3.0,0.0,4.0,4.0,1.0,29.0,12.0,0.0,0.0,26.0,9.0,0.0,30.0,2.0,5.0,94.0,54.0,0.0,46.0,0.0,29.0,11.0,2.0,0.0,69.0,1.0,30.0,106.0,13.0,142.0,37.0,2.0,0.0,1.0,8.0,48.0,71.0,2.0,81.0,14.0,0.0,5.0
+3.0,4.0,0.0,10.0,8.0,4.0,0.0,0.0,8.0,0.0,4.0,0.0,0.0,1.0,1.0,3.0,2.0,0.0,0.0,1.0,0.0,4.0,12.0,0.0,2.0,9.0,5.0,12.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,2.0,5.0,12.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,15.0,9.0,0.0,0.0,3.0,4.0,0.0,12.0,1.0,0.0,28.0,19.0,0.0,12.0,0.0,7.0,8.0,1.0,0.0,42.0,2.0,16.0,46.0,2.0,60.0,3.0,0.0,1.0,1.0,4.0,17.0,16.0,0.0,19.0,3.0,0.0,4.0
+0.0,21.0,0.0,17.0,4.0,7.0,3.0,2.0,16.0,8.0,6.0,3.0,3.0,3.0,2.0,2.0,5.0,8.0,4.0,6.0,0.0,9.0,16.0,2.0,1.0,12.0,7.0,26.0,0.0,8.0,5.0,0.0,2.0,0.0,1.0,0.0,8.0,0.0,5.0,29.0,4.0,1.0,6.0,2.0,1.0,3.0,0.0,1.0,5.0,4.0,6.0,2.0,3.0,5.0,2.0,0.0,5.0,1.0,3.0,7.0,0.0,5.0,1.0,26.0,16.0,0.0,0.0,12.0,13.0,0.0,26.0,3.0,3.0,81.0,49.0,0.0,46.0,0.0,15.0,14.0,1.0,2.0,70.0,0.0,39.0,89.0,4.0,134.0,11.0,1.0,0.0,0.0,5.0,17.0,73.0,0.0,11.0,2.0,0.0,3.0
+1.0,8.0,0.0,5.0,0.0,6.0,1.0,0.0,8.0,0.0,6.0,1.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,2.0,0.0,7.0,0.0,1.0,6.0,2.0,3.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,3.0,3.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,1.0,4.0,0.0,0.0,0.0,0.0,4.0,0.0,14.0,0.0,0.0,22.0,26.0,0.0,9.0,0.0,5.0,9.0,0.0,0.0,17.0,0.0,10.0,17.0,0.0,34.0,6.0,0.0,0.0,0.0,2.0,2.0,8.0,0.0,13.0,1.0,0.0,1.0
+0.0,11.0,0.0,2.0,0.0,3.0,0.0,0.0,5.0,1.0,5.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,0.0,6.0,7.0,3.0,0.0,3.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,5.0,1.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,3.0,0.0,0.0,0.0,1.0,0.0,0.0,7.0,0.0,0.0,8.0,11.0,0.0,4.0,0.0,2.0,0.0,0.0,0.0,13.0,0.0,2.0,16.0,0.0,20.0,5.0,0.0,0.0,0.0,0.0,1.0,3.0,0.0,18.0,0.0,0.0,2.0
+1.0,6.0,0.0,6.0,4.0,4.0,0.0,0.0,16.0,2.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,15.0,0.0,0.0,9.0,6.0,10.0,0.0,3.0,0.0,0.0,4.0,0.0,0.0,0.0,4.0,0.0,0.0,17.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,5.0,0.0,0.0,0.0,0.0,3.0,0.0,11.0,0.0,1.0,17.0,11.0,0.0,18.0,0.0,6.0,9.0,0.0,0.0,19.0,0.0,8.0,19.0,0.0,30.0,5.0,0.0,0.0,0.0,3.0,1.0,7.0,0.0,17.0,2.0,0.0,3.0
+1.0,9.0,0.0,8.0,5.0,14.0,3.0,0.0,11.0,6.0,11.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,15.0,2.0,1.0,13.0,12.0,4.0,0.0,3.0,5.0,0.0,0.0,0.0,2.0,1.0,1.0,0.0,0.0,12.0,2.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,5.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,5.0,2.0,15.0,2.0,0.0,0.0,0.0,3.0,0.0,20.0,0.0,0.0,18.0,15.0,0.0,12.0,0.0,5.0,2.0,0.0,0.0,28.0,0.0,10.0,20.0,1.0,28.0,13.0,0.0,0.0,0.0,6.0,9.0,14.0,0.0,17.0,3.0,0.0,0.0
+2.0,4.0,8.0,8.0,6.0,9.0,3.0,0.0,6.0,8.0,3.0,3.0,0.0,4.0,0.0,1.0,0.0,15.0,0.0,0.0,0.0,0.0,5.0,8.0,5.0,4.0,4.0,7.0,0.0,11.0,2.0,2.0,0.0,0.0,2.0,8.0,9.0,0.0,1.0,10.0,7.0,0.0,4.0,6.0,0.0,4.0,0.0,3.0,4.0,1.0,9.0,0.0,3.0,15.0,3.0,0.0,1.0,1.0,5.0,5.0,3.0,2.0,7.0,11.0,6.0,0.0,0.0,5.0,4.0,0.0,29.0,35.0,0.0,53.0,50.0,0.0,27.0,0.0,25.0,3.0,1.0,3.0,39.0,7.0,12.0,53.0,9.0,68.0,45.0,1.0,10.0,0.0,16.0,17.0,16.0,0.0,71.0,11.0,0.0,13.0
+5.0,4.0,0.0,4.0,13.0,6.0,7.0,2.0,11.0,3.0,2.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,13.0,6.0,1.0,3.0,7.0,7.0,0.0,7.0,12.0,4.0,0.0,1.0,2.0,5.0,5.0,0.0,6.0,9.0,4.0,0.0,4.0,11.0,0.0,9.0,0.0,0.0,3.0,1.0,11.0,0.0,1.0,3.0,3.0,0.0,1.0,2.0,0.0,0.0,6.0,1.0,3.0,16.0,1.0,0.0,3.0,4.0,1.0,0.0,26.0,7.0,1.0,17.0,20.0,0.0,17.0,2.0,20.0,3.0,0.0,3.0,66.0,2.0,8.0,18.0,4.0,41.0,29.0,0.0,1.0,2.0,9.0,8.0,18.0,0.0,66.0,5.0,0.0,6.0
diff --git a/pyPLNmodels/VEM.py b/pyPLNmodels/VEM.py
deleted file mode 100644
index 2dcbcc9804df8da27d7834ac3b30b84e0312e21a..0000000000000000000000000000000000000000
--- a/pyPLNmodels/VEM.py
+++ /dev/null
@@ -1,769 +0,0 @@
-import time
-from abc import ABC, abstractmethod
-import pickle
-
-import torch
-import numpy as np
-import seaborn as sns
-import matplotlib.pyplot as plt
-from sklearn.decomposition import PCA
-
-from ._closed_forms import closed_formula_beta, closed_formula_Sigma, closed_formula_pi
-from .elbos import ELBOPLNPCA, ELBOZIPLN, profiledELBOPLN
-from ._utils import (
-    PLNPlotArgs,
-    init_sigma,
-    init_c,
-    init_beta,
-    get_offsets_from_sum_of_counts,
-    check_dimensions_are_equal,
-    init_M,
-    format_data,
-    check_parameters_shape,
-    extract_cov_offsets_offsetsformula,
-    nice_string_of_dict,
-    plot_ellipse,
-)
-
-if torch.cuda.is_available():
-    DEVICE = "cuda"
-    print("Using a GPU")
-else:
-    DEVICE = "cpu"
-# should add a good init for M. For PLN we should not use the maximum of the log posterior; for PLN-PCA it may be ok.
-
-
-class _PLN(ABC):
-    """
-    Virtual class for all the PLN models.
-
-    This class must be subclassed. The methods `get_Sigma`, `compute_elbo`,
-    `random_init_var_parameters` and `list_of_parameters_needing_gradient` must
-    be defined.
-    """
-
-    WINDOW = 3
-
-    def __init__(self):
-        """
-        Simple initialization method.
-        """
-        self.WINDOW = 3
-        self._fitted = False
-        self.plotargs = PLNPlotArgs(self.WINDOW)
-
-    def format_datas(self, counts, covariates, offsets, offsets_formula):
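-        # Counts, covariates and offsets are coerced to double tensors; an
-        # intercept-only column of ones is used when no covariates are given,
-        # and offsets default to log(total counts) ("sum" formula) or to zeros.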
-        self.counts = format_data(counts)
-        if covariates is None:
-            self.covariates = torch.full(
-                (self.counts.shape[0], 1), 1, device=DEVICE
-            ).double()
-        else:
-            self.covariates = format_data(covariates)
-        if offsets is None:
-            if offsets_formula == "sum":
-                print("Setting the offsets offsets as the log of the sum of counts")
-                self.offsets = (
-                    torch.log(get_offsets_from_sum_of_counts(self.counts))
-                    .double()
-                    .to(DEVICE)
-                )
-            else:
-                self.offsets = torch.zeros(self.counts.shape, device=DEVICE)
-        else:
-            self.offsets = format_data(offsets).to(DEVICE)
-        self._n, self._p = self.counts.shape
-        self._d = self.covariates.shape[1]
-
-    @property
-    def n(self):
-        return self._n
-
-    @property
-    def p(self):
-        return self._p
-
-    @property
-    def d(self):
-        return self._d
-
-    def smart_init_beta(self):
-        self._beta = init_beta(self.counts, self.covariates, self.offsets)
-
-    def random_init_beta(self):
-        self._beta = torch.randn((self._d, self._p), device=DEVICE)
-
-    @abstractmethod
-    def random_init_model_parameters(self):
-        pass
-
-    @abstractmethod
-    def smart_init_model_parameters(self):
-        pass
-
-    @abstractmethod
-    def random_init_var_parameters(self):
-        pass
-
-    def smart_init_var_parameters(self):
-        pass
-
-    def init_parameters(self, do_smart_init):
-        print("Initialization ...")
-        if do_smart_init:
-            self.smart_init_model_parameters()
-            self.smart_init_var_parameters()
-        else:
-            self.random_init_model_parameters()
-            self.random_init_var_parameters()
-        print("Initialization finished")
-        self.put_parameters_to_device()
-
-    def put_parameters_to_device(self):
-        for parameter in self.list_of_parameters_needing_gradient:
-            parameter.requires_grad_(True)
-
-    @property
-    def list_of_parameters_needing_gradient(self):
-        """
-        A list containing all the parameters that need to be updated via a gradient step.
-        """
-
-    def fit(
-        self,
-        counts,
-        covariates=None,
-        offsets=None,
-        nb_max_iteration=50000,
-        lr=0.01,
-        class_optimizer=torch.optim.Rprop,
-        tol=1e-3,
-        do_smart_init=True,
-        verbose=False,
-        offsets_formula="sum",
-        keep_going=False,
-    ):
-        """
-        Main function of the class. Fit a PLN to the data.
-        Parameters
-        ----------
-        counts : torch.tensor or ndarray or DataFrame.
-            2-d count data.
-        covariates : torch.tensor or ndarray or DataFrame or
-            None, default = None
-            If not `None`, the first dimension should equal the first
-            dimension of `counts`.
-        offsets : torch.tensor or ndarray or DataFrame or None, default = None
-            Model offset. If not `None`, size should be the same as `counts`.
-        """
-        self.beginnning_time = time.time()
-        if keep_going is False:
-            self.format_datas(counts, covariates, offsets, offsets_formula)
-            check_parameters_shape(self.counts, self.covariates, self.offsets)
-            self.init_parameters(do_smart_init)
-        if self._fitted is True and keep_going is True:
-            self.beginnning_time -= self.plotargs.running_times[-1]
-        self.optim = class_optimizer(self.list_of_parameters_needing_gradient, lr=lr)
-        nb_iteration_done = 0
-        stop_condition = False
-        while nb_iteration_done < nb_max_iteration and not stop_condition:
-            nb_iteration_done += 1
-            loss = self.trainstep()
-            criterion = self.compute_criterion_and_update_plotargs(loss, tol)
-            if abs(criterion) < tol:
-                stop_condition = True
-            if verbose and nb_iteration_done % 50 == 0:
-                self.print_stats()
-        self.print_end_of_fitting_message(stop_condition, tol)
-        self._fitted = True
-
-    def trainstep(self):
-        """
-        Perform one gradient step: backpropagate the negative ELBO, update
-        the parameters and refresh the closed-form quantities.
-        """
-        self.optim.zero_grad()
-        loss = -self.compute_elbo()
-        loss.backward()
-        self.optim.step()
-        self.update_closed_forms()
-        return loss
-
-    def print_end_of_fitting_message(self, stop_condition, tol):
-        if stop_condition:
-            print(
-                f"Tolerance {tol} reached in {self.plotargs.iteration_number} iterations"
-            )
-        else:
-            print(
-                "Maximum number of iterations reached : ",
-                self.plotargs.iteration_number,
-                "last criterion = ",
-                np.round(self.plotargs.criterions[-1], 8),
-            )
-
-    def print_stats(self):
-        print("-------UPDATE-------")
-        print("Iteration number: ", self.plotargs.iteration_number)
-        print("Criterion: ", np.round(self.plotargs.criterions[-1], 8))
-        print("ELBO:", np.round(self.plotargs.elbos_list[-1], 6))
-
-    def compute_criterion_and_update_plotargs(self, loss, tol):
-        self.plotargs.elbos_list.append(-loss.item() / self._n)
-        self.plotargs.running_times.append(time.time() - self.beginnning_time)
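-        # The stopping criterion is the absolute change of the normalized ELBO
-        # over a sliding window of WINDOW iterations; until the window is
-        # filled, tol is returned so that fitting continues.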
-        if self.plotargs.iteration_number > self.WINDOW:
-            criterion = abs(
-                self.plotargs.elbos_list[-1]
-                - self.plotargs.elbos_list[-1 - self.WINDOW]
-            )
-            self.plotargs.criterions.append(criterion)
-            return criterion
-        return tol
-
-    def update_closed_forms(self):
-        pass
-
-    @abstractmethod
-    def compute_elbo(self):
-        """
-        Compute the Evidence Lower BOund (ELBO) that will be maximized by pytorch.
-        """
-
-    def display_Sigma(self, ax=None, savefig=False, name_file=""):
-        """
-        Display a heatmap of Sigma to visualize correlations.
-
-        If Sigma is too large (dimension > 400), only the first block of
-        size (400, 400) is displayed.
-
-        Parameters
-        ----------
-        ax : matplotlib Axes, optional
-            Axes in which to draw the plot, otherwise use the currently-active Axes.
-        savefig: bool, optional
-            If True the figure will be saved. Default is False.
-        name_file : str, optional
-            The name of the file the graphic will be saved to if saved.
-            Default is an empty string.
-        """
-        sigma = self.Sigma
-        if self._p > 400:
-            sigma = sigma[:400, :400]
-        sns.heatmap(sigma, ax=ax)
-        if savefig:
-            plt.savefig(name_file + self.NAME)
-        plt.show()  # to avoid displaying a blank screen
-
-    def __str__(self):
-        string = f"A multivariate Poisson Lognormal with {self.description}"
-        string += nice_string_of_dict(self.dict_for_printing)
-        return string
-
-    def show(self, axes=None):
-        print("Best likelihood:", np.max(-self.plotargs.elbos_list[-1]))
-        if axes is None:
-            _, axes = plt.subplots(1, 3, figsize=(23, 5))
-        self.plotargs.show_loss(ax=axes[-3])
-        self.plotargs.show_stopping_criterion(ax=axes[-2])
-        self.display_Sigma(ax=axes[-1])
-        plt.show()
-
-    @property
-    def elbos_list(self):
-        return self.plotargs.elbos_list
-
-    @property
-    def loglike(self):
-        if self._fitted is False:
-            raise AttributeError(
-                "The model is not fitted so that it did not" "computed likelihood"
-            )
-        return self._n * self.elbos_list[-1]
-
-    @property
-    def BIC(self):
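-        # BIC-type criterion: -loglike + (number of parameters / 2) * log(n);
-        # the model with the lowest value is preferred.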
-        return -self.loglike + self.number_of_parameters / 2 * np.log(self._n)
-
-    @property
-    def AIC(self):
-        return -self.loglike + self.number_of_parameters
-
-    @property
-    def dict_var_parameters(self):
-        return {"S": self._S, "M": self._M}
-
-    @property
-    def dict_model_parameters(self):
-        return {"beta": self._beta, "Sigma": self.Sigma}
-
-    @property
-    def dict_data(self):
-        return {
-            "counts": self.counts,
-            "covariates": self.covariates,
-            "offsets": self.offsets,
-        }
-
-    @property
-    def model_in_a_dict(self):
-        return self.dict_data | self.dict_model_parameters | self.dict_var_parameters
-
-    @property
-    def Sigma(self):
-        return self._Sigma.detach().cpu()
-
-    @property
-    def beta(self):
-        return self._beta.detach().cpu()
-
-    @property
-    def M(self):
-        return self._M.detach().cpu()
-
-    @property
-    def S(self):
-        return self._S.detach().cpu()
-
-    def save_model(self, filename):
-        with open(filename, "wb") as fp:
-            pickle.dump(self.model_in_a_dict, fp)
-
-    def load_model_from_file(self, path_of_file):
-        with open(path_of_file, "rb") as fp:
-            model_in_a_dict = pickle.load(fp)
-        self.model_in_a_dict = model_in_a_dict
-        self._fitted = True
-
-    @model_in_a_dict.setter
-    def model_in_a_dict(self, model_in_a_dict):
-        self.set_data_from_dict(model_in_a_dict)
-        self.set_parameters_from_dict(model_in_a_dict)
-
-    def set_data_from_dict(self, model_in_a_dict):
-        counts = model_in_a_dict["counts"]
-        covariates, offsets, offsets_formula = extract_cov_offsets_offsetsformula(
-            model_in_a_dict
-        )
-        self.format_datas(counts, covariates, offsets, offsets_formula)
-        check_parameters_shape(self.counts, self.covariates, self.offsets)
-        self.counts = counts
-        self.covariates = covariates
-        self.offsets = offsets
-
-    @abstractmethod
-    def set_parameters_from_dict(self, model_in_a_dict):
-        pass
-
-    @property
-    def dict_for_printing(self):
-        return {
-            "Loglike": np.round(self.loglike, 2),
-            "dimension": self._p,
-            "nb param": int(self.number_of_parameters),
-        }
-
-
-# need to do a good init for M and S
-class PLN(_PLN):
-    NAME = "PLN"
-
-    @property
-    def description(self):
-        return "full covariance model."
-
-    def smart_init_var_parameters(self):
-        self.random_init_var_parameters()
-
-    def random_init_var_parameters(self):
-        self._S = 1 / 2 * torch.ones((self._n, self._p)).to(DEVICE)
-        self._M = torch.ones((self._n, self._p)).to(DEVICE)
-
-    @property
-    def list_of_parameters_needing_gradient(self):
-        return [self._M, self._S]
-
-    def compute_elbo(self):
-        """
-        Compute the Evidence Lower BOund (ELBO) that will be
-        maximized by pytorch. Here we use the profiled ELBO
-        for the full covariance matrix.
-        """
-        return profiledELBOPLN(
-            self.counts, self.covariates, self.offsets, self._M, self._S
-        )
-
-    def smart_init_model_parameters(self):
-        # no model parameters since we are doing a profiled ELBO
-        pass
-
-    def random_init_model_parameters(self):
-        # no model parameters since we are doing a profiled ELBO
-        pass
-
-    @property
-    def _beta(self):
-        return closed_formula_beta(self.covariates, self._M)
-
-    @property
-    def beta(self):
-        return self._beta.detach().cpu()
-
-    @property
-    def _Sigma(self):
-        return closed_formula_Sigma(
-            self.covariates, self._M, self._S, self._beta, self._n
-        )
-
-    @property
-    def Sigma(self):
-        return self._Sigma.detach().cpu()
-
-    def set_parameters_from_dict(self, model_in_a_dict):
-        S = format_data(model_in_a_dict["S"])
-        nS, pS = S.shape
-        M = format_data(model_in_a_dict["M"])
-        nM, pM = M.shape
-        beta = format_data(model_in_a_dict["beta"])
-        _, pbeta = beta.shape
-        Sigma = format_data(model_in_a_dict["Sigma"])
-        pSigma1, pSigma2 = Sigma.shape
-        check_dimensions_are_equal("Sigma", "Sigma.t", pSigma1, pSigma2, 0)
-        check_dimensions_are_equal("S", "M", nS, nM, 0)
-        check_dimensions_are_equal("S", "M", pS, pM, 1)
-        check_dimensions_are_equal("Sigma", "beta", pSigma1, pbeta, 1)
-        check_dimensions_are_equal("M", "beta", pM, pbeta, 1)
-        self._S = S
-        self._M = M
-        self._beta = beta
-        self._Sigma = Sigma
-
-    @property
-    def latent_variables(self):
-        return self.M
-
-    @property
-    def number_of_parameters(self):
-        return self._p * (self._p + self._d)
-
-
-class PLNPCA:
-    def __init__(self, ranks):
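-        # Build one _PLNPCA model per requested rank so that several ranks can
-        # be fitted on the same data and compared via BIC or AIC.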
-        if isinstance(ranks, list):
-            self.ranks = ranks
-            self.dict_models = {}
-            for rank in ranks:
-                if isinstance(rank, int):
-                    self.dict_models[rank] = _PLNPCA(rank)
-                else:
-                    TypeError("Please instantiate with either a list of integers.")
-        elif isinstance(ranks, int):
-            self.ranks = [ranks]
-            self.dict_models = {ranks: _PLNPCA(ranks)}
-        else:
-            raise TypeError(
-                "Please instantiate with either a list of integer or an integer"
-            )
-
-    @property
-    def models(self):
-        return list(self.dict_models.values())
-
-    def fit(
-        self,
-        counts,
-        covariates=None,
-        offsets=None,
-        nb_max_iteration=100000,
-        lr=0.01,
-        class_optimizer=torch.optim.Rprop,
-        tol=1e-3,
-        do_smart_init=True,
-        verbose=False,
-        offsets_formula="sum",
-    ):
-        for pca in self.dict_models.values():
-            pca.fit(
-                counts,
-                covariates,
-                offsets,
-                nb_max_iteration,
-                lr,
-                class_optimizer,
-                tol,
-                do_smart_init,
-                verbose,
-                offsets_formula,
-            )
-
-    def __getitem__(self, rank):
-        return self.dict_models[rank]
-
-    @property
-    def BIC(self):
-        return {
-            model._rank: np.round(model.BIC, 3) for model in self.dict_models.values()
-        }
-
-    @property
-    def AIC(self):
-        return {
-            model._rank: np.round(model.AIC, 3) for model in self.dict_models.values()
-        }
-
-    @property
-    def loglikes(self):
-        return {model._rank: model.loglike for model in self.dict_models.values()}
-
-    def show(self):
-        bic = self.BIC
-        aic = self.AIC
-        loglikes = self.loglikes
-        bic_color = "blue"
-        aic_color = "red"
-        loglikes_color = "orange"
-        plt.scatter(bic.keys(), bic.values(), label="BIC criterion", c=bic_color)
-        plt.plot(bic.keys(), bic.values(), c=bic_color)
-        plt.scatter(aic.keys(), aic.values(), label="AIC criterion", c=aic_color)
-        plt.plot(aic.keys(), aic.values(), c=aic_color)
-        plt.scatter(
-            loglikes.keys(),
-            -np.array(list(loglikes.values())),
-            label="Negative loglike",
-            c=loglikes_color,
-        )
-        plt.plot(loglikes.keys(), -np.array(list(loglikes.values())), c=loglikes_color)
-        plt.legend()
-        plt.show()
-
-    def best_model(self, criterion="AIC"):
-        if criterion == "BIC":
-            return self[self.ranks[np.argmin(list(self.BIC.values()))]]
-        if criterion == "AIC":
-            return self[self.ranks[np.argmin(list(self.AIC.values()))]]
-        raise ValueError(f"Unknown criterion {criterion}, expected 'AIC' or 'BIC'.")
-
-    def save_model(self, rank, filename):
-        self.dict_models[rank].save_model(filename)
-
-    def save_models(self, filename):
-        for model in self.models:
-            model_filename = filename + str(model._rank)
-            model.save_model(model_filename)
-
-    @property
-    def _p(self):
-        return self[self.ranks[0]].p
-
-    def __str__(self):
-        nb_models = len(self.models)
-        to_print = (
-            f"Collection of {nb_models} PLNPCA models with {self._p} variables.\n"
-        )
-        to_print += f"Ranks considered:{self.ranks} \n \n"
-        to_print += f"BIC metric:{self.BIC}\n"
-        to_print += (
-            f"Best model (lower BIC):{self.best_model(criterion = 'BIC')._rank}\n \n"
-        )
-        to_print += f"AIC metric:{self.AIC}\n"
-        to_print += (
-            f"Best model (lower AIC):{self.best_model(criterion = 'AIC')._rank}\n"
-        )
-        return to_print
-
-    def load_model_from_file(self, path_of_file):
-        with open(path_of_file, "rb") as fp:
-            model_in_a_dict = pickle.load(fp)
-        rank = model_in_a_dict["rank"]
-        self.dict_models[rank].model_in_a_dict = model_in_a_dict
-
-
-class _PLNPCA(_PLN):
-    NAME = "PLNPCA"
-
-    def __init__(self, rank):
-        super().__init__()
-        self._rank = rank
-
-    @property
-    def dict_model_parameters(self):
-        dict_model_parameters = super().dict_model_parameters
-        dict_model_parameters.pop("Sigma")
-        dict_model_parameters["C"] = self._C
-        return dict_model_parameters
-
-    def smart_init_model_parameters(self):
-        super().smart_init_beta()
-        self._C = init_c(
-            self.counts, self.covariates, self.offsets, self._beta, self._rank
-        )
-
-    def random_init_model_parameters(self):
-        super().random_init_beta()
-        self._C = torch.randn((self._p, self._rank)).to(DEVICE)
-
-    def random_init_var_parameters(self):
-        self._S = 1 / 2 * torch.ones((self._n, self._rank)).to(DEVICE)
-        self._M = torch.ones((self._n, self._rank)).to(DEVICE)
-
-    def smart_init_var_parameters(self):
-        self._M = (
-            init_M(self.counts, self.covariates, self.offsets, self._beta, self._C)
-            .to(DEVICE)
-            .detach()
-        )
-        self._S = 1 / 2 * torch.ones((self._n, self._rank)).to(DEVICE)
-        self._M.requires_grad_(True)
-        self._S.requires_grad_(True)
-
-    @property
-    def list_of_parameters_needing_gradient(self):
-        return [self._C, self._beta, self._M, self._S]
-
-    def compute_elbo(self):
-        return ELBOPLNPCA(
-            self.counts,
-            self.covariates,
-            self.offsets,
-            self._M,
-            self._S,
-            self._C,
-            self._beta,
-        )
-
-    @property
-    def number_of_parameters(self):
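-        # p * d coefficients for beta plus p * rank entries of C, minus
-        # rank * (rank - 1) / 2 since C is only identified up to a rotation.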
-        return self._p * (self._d + self._rank) - self._rank * (self._rank - 1) / 2
-
-    def set_parameters_from_dict(self, model_in_a_dict):
-        S = format_data(model_in_a_dict["S"])
-        nS, qS = S.shape
-        M = format_data(model_in_a_dict["M"])
-        nM, qM = M.shape
-        beta = format_data(model_in_a_dict["beta"])
-        _, pbeta = beta.shape
-        C = format_data(model_in_a_dict["C"])
-        pC, qC = C.shape
-        check_dimensions_are_equal("S", "M", nS, nM, 0)
-        check_dimensions_are_equal("S", "M", qS, qM, 1)
-        check_dimensions_are_equal("C.t", "beta", pC, pbeta, 1)
-        check_dimensions_are_equal("M", "C", qM, qC, 1)
-        self._S = S.to(DEVICE)
-        self._M = M.to(DEVICE)
-        self._beta = beta.to(DEVICE)
-        self._C = C.to(DEVICE)
-
-    @property
-    def Sigma(self):
-        return torch.matmul(self._C, self._C.T).detach().cpu()
-
-    @property
-    def description(self):
-        return f" with {self._rank} principal component."
-
-    @property
-    def latent_variables(self):
-        return torch.matmul(self._M, self._C.T).detach()
-
-    def get_projected_latent_variables(self, nb_dim=None):
-        if nb_dim is None:
-            nb_dim = self._rank
-        if nb_dim > self._rank:
-            raise AttributeError(
-                f"The number of dimension {nb_dim} is larger than the rank {self._rank}"
-            )
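-        # Orthonormalize the columns of C with a reduced QR decomposition and
-        # project the latent variables onto the first nb_dim directions.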
-        ortho_C = torch.linalg.qr(self._C, "reduced")[0]
-        return torch.mm(self.latent_variables, ortho_C[:, :nb_dim]).detach()
-
-    def get_pca_projected_latent_variables(self, nb_dim=None):
-        if nb_dim is None:
-            nb_dim = self._rank
-        pca = PCA(n_components=nb_dim)
-        return pca.fit_transform(self.latent_variables.cpu())
-
-    @property
-    def model_in_a_dict(self):
-        return super().model_in_a_dict | {"rank": self._rank}
-
-    @model_in_a_dict.setter
-    def model_in_a_dict(self, model_in_a_dict):
-        self.set_data_from_dict(model_in_a_dict)
-        self.set_parameters_from_dict(model_in_a_dict)
-
-    @property
-    def C(self):
-        return self._C
-
-    def viz(self, ax=None, color=None, label=None, label_of_colors=None):
-        if self._rank != 2:
-            raise RuntimeError("Can not perform visualization for rank != 2.")
-        if ax is None:
-            ax = plt.gca()
-        proj_variables = self.get_projected_latent_variables()
-        xs = proj_variables[:, 0].cpu().numpy()
-        ys = proj_variables[:, 1].cpu().numpy()
-        sns.scatterplot(x=xs, y=ys, hue=color, ax=ax)
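-        # The variational covariance of each sample is diagonal, diag(S_i**2);
-        # draw one uncertainty ellipse per projected point.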
-        covariances = torch.diag_embed(self._S**2).detach()
-        for i in range(covariances.shape[0]):
-            plot_ellipse(xs[i], ys[i], cov=covariances[i], ax=ax)
-        return ax
-
-
-class ZIPLN(PLN):
-    NAME = "ZIPLN"
-
-    @property
-    def description(self):
-        return f"with full covariance model and zero-inflation."
-
-    def random_init_model_parameters(self):
-        super().random_init_model_parameters()
-        self._Theta_zero = torch.randn(self._d, self._p)
-        self._Sigma = torch.diag(torch.ones(self._p)).to(DEVICE)
-
-    # should find a better initialization, especially for Theta_zero
-    def smart_init_model_parameters(self):
-        super().smart_init_model_parameters()
-        self._Sigma = init_sigma(self.counts, self.covariates, self.offsets, self._beta)
-        self._Theta_zero = torch.randn(self._d, self._p)
-
-    def random_init_var_parameters(self):
-        self.dirac = self.counts == 0
-        self._M = torch.randn(self._n, self._p, device=DEVICE)
-        self._S = torch.randn(self._n, self._p, device=DEVICE)
-        self.pi = torch.empty(self._n, self._p).uniform_(0, 1).to(DEVICE) * self.dirac
-
-    def compute_elbo(self):
-        return ELBOZIPLN(
-            self.counts,
-            self.covariates,
-            self.offsets,
-            self._M,
-            self._S,
-            self.pi,
-            self._Sigma,
-            self._beta,
-            self._Theta_zero,
-            self.dirac,
-        )
-
-    @property
-    def list_of_parameters_needing_gradient(self):
-        return [self._M, self._S, self._Theta_zero]
-
-    def update_closed_forms(self):
-        self._beta = closed_formula_beta(self.covariates, self._M)
-        self._Sigma = closed_formula_Sigma(
-            self.covariates, self._M, self._S, self._beta, self._n
-        )
-        self.pi = closed_formula_pi(
-            self.offsets,
-            self._M,
-            self._S,
-            self.dirac,
-            self.covariates,
-            self._Theta_zero,
-        )
-
-    @property
-    def number_of_parameters(self):
-        return self._p * (2 * self._d + (self._p + 1) / 2)
diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py
index 8e910c3cac1c49b2c3e0aaf9775c05728992d61b..15591263ad54a35729ad9b724a7467f8be9b30b5 100644
--- a/pyPLNmodels/__init__.py
+++ b/pyPLNmodels/__init__.py
@@ -1,6 +1,13 @@
-# __version__ = "0.0.17"
+from .models import PLNPCA, PLN  # pylint:disable=[C0114]
+from .elbos import profiled_elbo_pln, elbo_plnpca, elbo_pln
+from ._utils import get_simulated_count_data, get_real_count_data
 
-from .VEM import PLNPCA, PLN
-from .elbos import profiledELBOPLN, ELBOPLNPCA, ELBOPLN
-
-__all__ = ("PLNPCA", "PLN", "profiledELBOPLN", "ELBOPLNPCA", "ELBOPLN")
+__all__ = (
+    "PLNPCA",
+    "PLN",
+    "profiled_elbo_pln",
+    "elbo_plnpca",
+    "elbo_pln",
+    "get_simulated_count_data",
+    "get_real_count_data",
+)
diff --git a/pyPLNmodels/_closed_forms.py b/pyPLNmodels/_closed_forms.py
index 5964d801ac2abed5bd886f09b097f34c72853289..889cf221a5711e7d30c8c5ec4b7dfee49eedd517 100644
--- a/pyPLNmodels/_closed_forms.py
+++ b/pyPLNmodels/_closed_forms.py
@@ -1,21 +1,22 @@
-import torch
+import torch  # pylint:disable=[C0114]
 
 
-def closed_formula_Sigma(covariates, M, S, beta, n):
-    """Closed form for Sigma for the M step for the noPCA model."""
-    MmoinsXB = M - torch.mm(covariates, beta)
-    closed = torch.mm(MmoinsXB.T, MmoinsXB)
-    closed += torch.diag(torch.sum(torch.multiply(S, S), dim=0))
-    return 1 / (n) * closed
+def closed_formula_covariance(covariates, latent_mean, latent_var, coef, n_samples):
+    """Closed form for covariance for the M step for the noPCA model."""
+    m_moins_xb = latent_mean - covariates @ coef
+    closed = m_moins_xb.T @ m_moins_xb + torch.diag(
+        torch.sum(torch.square(latent_var), dim=0)
+    )
+    return closed / n_samples
 
 
-def closed_formula_beta(covariates, M):
-    """Closed form for beta for the M step for the noPCA model."""
-    return torch.mm(
-        torch.mm(torch.inverse(torch.mm(covariates.T, covariates)), covariates.T), M
-    )
+def closed_formula_coef(covariates, latent_mean):
+    """Closed form for coef for the M step for the noPCA model."""
+    return torch.inverse(covariates.T @ covariates) @ covariates.T @ latent_mean
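+
+
+# Check sketch (illustrative, not part of the model code): this closed form is
+# the ordinary least-squares solution of latent_mean on covariates, so it can
+# be verified against torch.linalg.lstsq.
+#
+#     covariates = torch.randn(100, 3)
+#     latent_mean = torch.randn(100, 10)
+#     coef = closed_formula_coef(covariates, latent_mean)
+#     expected = torch.linalg.lstsq(covariates, latent_mean).solution
+#     torch.allclose(coef, expected, atol=1e-5)  # True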
 
 
-def closed_formula_pi(offsets, M, S, dirac, covariates, Theta_zero):
-    A = torch.exp(offsets + M + torch.multiply(S, S) / 2)
-    return torch.multiply(torch.sigmoid(A + torch.mm(covariates, Theta_zero)), dirac)
+def closed_formula_pi(
+    offsets, latent_mean, latent_var, dirac, covariates, _coef_inflation
+):
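+    """Closed form for the variational probabilities pi for the ZIPLN model."""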
+    poiss_param = torch.exp(offsets + latent_mean + 0.5 * torch.square(latent_var))
+    return torch.sigmoid(poiss_param + torch.mm(covariates, _coef_inflation)) * dirac
diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py
index 6255aedf1759611350b45ef5ceb3af861683f593..1024746c6dd76601317650532dd981c24d4c8ba7 100644
--- a/pyPLNmodels/_utils.py
+++ b/pyPLNmodels/_utils.py
@@ -1,5 +1,5 @@
 import math  # pylint:disable=[C0114]
-from scipy.linalg import toeplitz
+import warnings
 
 import matplotlib.pyplot as plt
 import numpy as np
@@ -7,13 +7,10 @@ import torch
 import torch.linalg as TLA
 import pandas as pd
 from matplotlib.patches import Ellipse
-import matplotlib.transforms as transforms
-
+from matplotlib import transforms
 
 torch.set_default_dtype(torch.float64)
 
-# offsets is not doing anything in the initialization of Sigma. should be fixed.
-
 if torch.cuda.is_available():
     DEVICE = torch.device("cuda")
 else:
@@ -31,7 +28,7 @@ class PLNPlotArgs:
     def iteration_number(self):
         return len(self.elbos_list)
 
-    def show_loss(self, ax=None, savefig=False, name_doss=""):
+    def show_loss(self, ax=None, name_doss=""):
         """Show the ELBO of the algorithm along the iterations.
 
         args:
@@ -56,11 +53,8 @@ class PLNPlotArgs:
         ax.set_xlabel("Seconds")
         ax.set_ylabel("ELBO")
         ax.legend()
-        # save the graphic if needed
-        if savefig:
-            plt.savefig(name_doss)
 
-    def show_stopping_criterion(self, ax=None, savefig=False, name_doss=""):
+    def show_stopping_criterion(self, ax=None, name_doss=""):
         """Show the criterion of the algorithm along the iterations.
 
         args:
@@ -85,54 +79,50 @@ class PLNPlotArgs:
         ax.set_ylabel("Delta")
         ax.set_title("Increments")
         ax.legend()
-        # save the graphic if needed
-        if savefig:
-            plt.savefig(name_doss)
 
 
-def init_sigma(counts, covariates, offsets, beta):
-    """Initialization for Sigma for the PLN model. Take the log of counts
-    (careful when counts=0), remove the covariates effects X@beta and
+def init_sigma(counts, covariates, coef):
+    """Initialization for covariance for the PLN model. Take the log of counts
+    (careful when counts=0), remove the covariates effects X@coef and
     then do as a MLE for Gaussians samples.
     Args :
             counts: torch.tensor. Samples with size (n,p)
             covariates: torch.tensor. Covariates, size (n,d)
-            beta: torch.tensor of size (d,p)
+            coef: torch.tensor of size (d,p)
     Returns : torch.tensor of size (p,p).
     """
-    # Take the log of counts, and be careful when counts = 0. If counts = 0,
-    # then we set the log(counts) as 0.
     log_y = torch.log(counts + (counts == 0) * math.exp(-2))
-    # we remove the mean so that we see only the covariances
     log_y_centered = (
-        log_y - torch.matmul(covariates.unsqueeze(1), beta.unsqueeze(0)).squeeze()
+        log_y - torch.matmul(covariates.unsqueeze(1), coef.unsqueeze(0)).squeeze()
     )
     # MLE in a Gaussian setting
-    n = counts.shape[0]
-    Sigma_hat = 1 / (n - 1) * (log_y_centered.T) @ log_y_centered
-    return Sigma_hat
+    n_samples = counts.shape[0]
+    sigma_hat = 1 / (n_samples - 1) * (log_y_centered.T) @ log_y_centered
+    return sigma_hat
 
 
-def init_c(counts, covariates, offsets, beta, rank):
-    """Inititalization for C for the PLN model. Get a first
-    guess for Sigma that is easier to estimate and then takes
-    the rank largest eigenvectors to get C.
+def init_components(counts, covariates, coef, rank):
+    """Initialization for components for the PLN model. Get a first
+    guess for the covariance that is easier to estimate, then take
+    the eigenvectors with the rank largest eigenvalues to get the components.
     Args :
         counts: torch.tensor. Samples with size (n,p)
         covariates: torch.tensor. Covariates, size (n,d)
-        beta: torch.tensor of size (d,p)
+        coef: torch.tensor of size (d,p)
+        rank: int. The dimension of the latent space, i.e. the reduced dimension.
     Returns :
-        torch.tensor of size (p,rank). The initialization of C.
+        torch.tensor of size (p,rank). The initialization of components.
     """
-    Sigma_hat = init_sigma(counts, covariates, offsets, beta).detach()
-    C = C_from_Sigma(Sigma_hat, rank)
-    return C
+    sigma_hat = init_sigma(counts, covariates, coef).detach()
+    components = components_from_covariance(sigma_hat, rank)
+    return components
 
 
-def init_M(counts, covariates, offsets, beta, C, N_iter_max=500, lr=0.01, eps=7e-3):
+def init_latent_mean(
+    counts, covariates, offsets, coef, components, n_iter_max=500, lr=0.01, eps=7e-3
+):
     """Initialization for the variational parameter M. Basically,
     the mode of the log_posterior is computed.
 
@@ -140,7 +130,7 @@ def init_M(counts, covariates, offsets, beta, C, N_iter_max=500, lr=0.01, eps=7e
         counts: torch.tensor. Samples with size (n,p)
         0: torch.tensor. Offset, size (n,p)
         covariates: torch.tensor. Covariates, size (n,d)
-        beta: torch.tensor of size (d,p)
+        coef: torch.tensor of size (d,p)
         n_iter_max: int. The maximum number of iterations in
             the gradient ascent.
         lr: positive float. The learning rate of the optimizer.
@@ -149,25 +139,25 @@ def init_M(counts, covariates, offsets, beta, C, N_iter_max=500, lr=0.01, eps=7e
             is the t-th iteration of the algorithm. This parameter
             greatly affects the running time of the algorithm. Default is 7e-3.
     """
-    W = torch.randn(counts.shape[0], C.shape[1], device=DEVICE)
-    W.requires_grad_(True)
-    optimizer = torch.optim.Rprop([W], lr=lr)
+    mode = torch.randn(counts.shape[0], components.shape[1], device=DEVICE)
+    mode.requires_grad_(True)
+    optimizer = torch.optim.Rprop([mode], lr=lr)
     crit = 2 * eps
-    old_W = torch.clone(W)
+    old_mode = torch.clone(mode)
     keep_condition = True
     i = 0
-    while i < N_iter_max and keep_condition:
-        batch_loss = log_PW_given_Y(counts, covariates, offsets, W, C, beta)
+    while i < n_iter_max and keep_condition:
+        batch_loss = log_posterior(counts, covariates, offsets, mode, components, coef)
         loss = -torch.mean(batch_loss)
         loss.backward()
         optimizer.step()
-        crit = torch.max(torch.abs(W - old_W))
+        crit = torch.max(torch.abs(mode - old_mode))
         optimizer.zero_grad()
         if crit < eps and i > 2:
             keep_condition = False
-        old_W = torch.clone(W)
+        old_mode = torch.clone(mode)
         i += 1
-    return W
+    return mode
 
 
 def sigmoid(tens):
@@ -175,89 +165,77 @@ def sigmoid(tens):
     return 1 / (1 + torch.exp(-tens))
 
 
-def sample_PLN(C, beta, covariates, offsets, B_zero=None):
-    """Sample Poisson log Normal variables. If B_zero is not None, the model will
+def sample_pln(components, coef, covariates, offsets, _coef_inflation=None, seed=None):
+    """Sample Poisson log Normal variables. If _coef_inflation is not None, the model will
     be zero inflated.
 
     Args:
-        C: torch.tensor of size (p,rank). The matrix C of the PLN model
-        beta: torch.tensor of size (d,p). Regression parameter.
+        components: torch.tensor of size (p,rank). The matrix components of the PLN model
+        coef: torch.tensor of size (d,p). Regression parameter.
         offsets: torch.tensor of size (n,p). Offsets.
         covariates : torch.tensor of size (n,d). Covariates.
-        B_zero: torch.tensor of size (d,p), optional. If B_zero is not None,
+        _coef_inflation: torch.tensor of size (d,p), optional. If _coef_inflation is not None,
             the ZIPLN model is chosen, so that it adds a
             Bernoulli layer. Default is None.
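+        seed: int, optional. Seed for the random number generator. Default is None.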
     Returns :
         counts: torch.tensor of size (n,p), the count variables.
         gaussian: torch.tensor of size (n,p), the Gaussian latent variables.
         ksi: torch.tensor of size (n,p), the Bernoulli latent variables
-        (full of zeros if B_zero is None).
+        (full of zeros if _coef_inflation is None).
     """
-
-    n = offsets.shape[0]
-    rank = C.shape[1]
-    Z = torch.mm(torch.randn(n, rank, device=DEVICE), C.T) + covariates @ beta
-    parameter = torch.exp(offsets + Z)
-    if B_zero is not None:
+    prev_state = torch.random.get_rng_state()
+    if seed is not None:
+        torch.random.manual_seed(seed)
+    n_samples = offsets.shape[0]
+    rank = components.shape[1]
+    full_of_ones = torch.ones((n_samples, 1), device=DEVICE)
+    if covariates is None:
+        covariates = full_of_ones
+    else:
+        covariates = torch.cat((full_of_ones, covariates), axis=1)
+    gaussian = (
+        torch.mm(torch.randn(n_samples, rank, device=DEVICE), components.T)
+        + covariates @ coef
+    )
+    parameter = torch.exp(offsets + gaussian)
+    if _coef_inflation is not None:
         print("ZIPLN is sampled")
-        ZI_cov = covariates @ B_zero
-        ksi = torch.bernoulli(1 / (1 + torch.exp(-ZI_cov)))
+        zero_inflated_mean = covariates @ _coef_inflation
+        ksi = torch.bernoulli(1 / (1 + torch.exp(-zero_inflated_mean)))
     else:
         ksi = 0
     counts = (1 - ksi) * torch.poisson(parameter)
-    return counts, Z, ksi
-
-
-def logit(tens):
-    """logit function. If x is too close from 1, we set the result to 0.
-    performs logit element wise."""
-    return torch.nan_to_num(torch.log(x / (1 - tens)), nan=0, neginf=0, posinf=0)
+    torch.random.set_rng_state(prev_state)
+    return counts, gaussian, ksi
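+
+
+# Minimal usage sketch (illustrative; relies on the simulation helpers defined
+# later in this module):
+#
+#     components = get_components_simulation(dim=25, rank=5)
+#     offsets, covariates, coef = get_simulation_offsets_cov_coef(
+#         n_samples=100, nb_cov=2, dim=25
+#     )
+#     counts, gaussian, ksi = sample_pln(components, coef, covariates, offsets, seed=0)
+#     counts.shape  # torch.Size([100, 25])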
 
 
-def build_block_Sigma(p, block_size):
-    """Build a matrix per block of size (p,p). There will be p//block_size+1
-    blocks of size block_size. The first p//block_size ones will be the same
-    size. The last one will have a smaller size (size (0,0)
-    if p%block_size = 0).
-    Args:
-        p: int.
-        block_size: int. Should be lower than p.
-    Returns: a torch.tensor of size (p,p) and symmetric.
-    """
-    k = p // block_size  # number of matrices of size p//block_size.
-    alea = np.random.randn(k + 1) ** 2 + 1
-    Sigma = np.zeros((p, p))
-    last_block_size = p - k * block_size
-    for i in range(k):
-        Sigma[
-            i * block_size : (i + 1) * block_size, i * block_size : (i + 1) * block_size
-        ] = alea[i] * toeplitz(0.7 ** np.arange(block_size))
-    # Last block matrix.
-    if last_block_size > 0:
-        Sigma[-last_block_size:, -last_block_size:] = alea[k] * toeplitz(
-            0.7 ** np.arange(last_block_size)
-        )
-    return Sigma
+# def logit(tens):
+#     """Element-wise logit function. If tens is too close to 1, the result
+#     is set to 0."""
+#     return torch.nan_to_num(
+#         torch.log(tens / (1 - tens)), nan=0, neginf=0, posinf=0
+#     )
 
 
-def C_from_Sigma(Sigma, rank):
-    """Get the best matrix of size (p,rank) when Sigma is of
-    size (p,p). i.e. reduces norm(Sigma-C@C.T)
+def components_from_covariance(covariance, rank):
+    """Get the best matrix of size (p,rank) when covariance is of
+    size (p,p). i.e. reduces norm(covariance-components@components.T)
     Args :
-        Sigma: torch.tensor of size (p,p). Should be positive definite and
+        covariance: torch.tensor of size (p,p). Should be positive definite and
             symmetric.
-        rank: int. The number of columns wanted for C
+        rank: int. The number of columns wanted for components
 
     Returns:
-        C_reduct: torch.tensor of size (p,rank) containing the rank eigenvectors with
+        components_reduct: torch.tensor of size (p,rank) containing the rank eigenvectors with
         largest eigenvalues.
     """
-    w, v = TLA.eigh(Sigma)
-    C_reduct = v[:, -rank:] @ torch.diag(torch.sqrt(w[-rank:]))
-    return C_reduct
+    eigenvalues, eigenvectors = TLA.eigh(covariance)
+    requested_components = eigenvectors[:, -rank:] @ torch.diag(
+        torch.sqrt(eigenvalues[-rank:])
+    )
+    return requested_components
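+
+
+# Quick sanity-check sketch (illustrative): for an exactly rank-2 covariance,
+# the factorization recovers it up to numerical error.
+#
+#     true_components = torch.randn(10, 2)
+#     covariance = true_components @ true_components.T
+#     components = components_from_covariance(covariance, rank=2)
+#     torch.norm(covariance - components @ components.T)  # close to 0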
 
 
-def init_beta(counts, covariates, offsets):
+def init_coef(counts, covariates):
     log_y = torch.log(counts + (counts == 0) * math.exp(-2))
     log_y = log_y.to(DEVICE)
     return torch.matmul(
@@ -266,7 +244,7 @@ def init_beta(counts, covariates, offsets):
     )
 
 
-def log_stirling(n):
+def log_stirling(integer):
     """Compute log(n!) even for n large. We use the Stirling formula to avoid
     numerical infinite values of n!.
     Args:
@@ -274,38 +252,44 @@ def log_stirling(n):
     Returns:
         An approximation of log(n!), element-wise.
     """
-    n_ = n + (n == 0)  # Replace the 0 with 1. It doesn't change anything since 0! = 1!
-    return torch.log(torch.sqrt(2 * np.pi * n_)) + n_ * torch.log(n_ / math.exp(1))
+    integer_ = integer + (
+        integer == 0
+    )  # Replace the 0 with 1. It doesn't change anything since 0! = 1!
+    return torch.log(torch.sqrt(2 * np.pi * integer_)) + integer_ * torch.log(
+        integer_ / math.exp(1)
+    )
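+
+
+# Sanity-check sketch (illustrative): the Stirling approximation should be
+# close to the exact log-factorial, log(n!) = torch.lgamma(n + 1).
+#
+#     n = torch.tensor([5.0, 50.0, 500.0])
+#     log_stirling(n)      # approximate log(n!)
+#     torch.lgamma(n + 1)  # exact log(n!)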
 
 
-def log_PW_given_Y(counts_b, covariates_b, offsets_b, W, C, beta):
+def log_posterior(counts, covariates, offsets, posterior_mean, components, coef):
     """Compute the log posterior of the PLN model. Compute it either
-    for W of size (N_samples, N_batch,rank) or (batch_size, rank). Need to have
+    for posterior_mean of size (N_samples, N_batch, rank) or (batch_size, rank). Both
     cases are needed since both occur later in the code. Please see the mathematical
     description of the package for the formula.
     Args :
-        counts_b : torch.tensor of size (batch_size, p)
-        covariates_b : torch.tensor of size (batch_size, d) or (d)
+        counts : torch.tensor of size (batch_size, p)
+        covariates : torch.tensor of size (batch_size, d) or (d)
     Returns: torch.tensor of size (N_samples, batch_size) or (batch_size).
     """
-    length = len(W.shape)
-    rank = W.shape[-1]
-    if length == 2:
-        CW = torch.matmul(C.unsqueeze(0), W.unsqueeze(2)).squeeze()
-    elif length == 3:
-        CW = torch.matmul(C.unsqueeze(0).unsqueeze(1), W.unsqueeze(3)).squeeze()
-
-    A_b = offsets_b + CW + covariates_b @ beta
-    first_term = -rank / 2 * math.log(2 * math.pi) - 1 / 2 * torch.norm(W, dim=-1) ** 2
+    length = len(posterior_mean.shape)
+    rank = posterior_mean.shape[-1]
+    if length == 2:
+        components_posterior_mean = torch.matmul(
+            components.unsqueeze(0), posterior_mean.unsqueeze(2)
+        ).squeeze()
+    else:
+        components_posterior_mean = torch.matmul(
+            components.unsqueeze(0).unsqueeze(1), posterior_mean.unsqueeze(3)
+        ).squeeze()
+
+    log_lambda = offsets + components_posterior_mean + covariates @ coef
+    first_term = (
+        -rank / 2 * math.log(2 * math.pi)
+        - 1 / 2 * torch.norm(posterior_mean, dim=-1) ** 2
+    )
     second_term = torch.sum(
-        -torch.exp(A_b) + A_b * counts_b - log_stirling(counts_b), axis=-1
+        -torch.exp(log_lambda) + log_lambda * counts - log_stirling(counts), axis=-1
     )
     return first_term + second_term
 
 
 def trunc_log(tens, eps=1e-16):
-    y = torch.min(torch.max(tens, torch.tensor([eps])), torch.tensor([1 - eps]))
-    return torch.log(y)
+    truncated = torch.min(torch.max(tens, torch.tensor([eps])), torch.tensor([1 - eps]))
+    return torch.log(truncated)
 
 
 def get_offsets_from_sum_of_counts(counts):
@@ -317,14 +301,14 @@ def raise_wrong_dimension_error(
     str_first_array, str_second_array, dim_first_array, dim_second_array, dim_of_error
 ):
     msg = (
-        f"The size of tensor {str_first_array} ({dim_first_array}) must match"
-        f"the size of tensor {str_second_array} ({dim_second_array}) at"
+        f"The size of tensor {str_first_array} ({dim_first_array}) must match "
+        f"the size of tensor {str_second_array} ({dim_second_array}) at "
         f"non-singleton dimension {dim_of_error}"
     )
     raise ValueError(msg)
 
 
-def check_dimensions_are_equal(
+def check_two_dimensions_are_equal(
     str_first_array, str_second_array, dim_first_array, dim_second_array, dim_of_error
 ):
     if dim_first_array != dim_second_array:
@@ -337,18 +321,6 @@ def check_dimensions_are_equal(
         )
 
 
-def init_S(counts, covariates, offsets, beta, C, M):
-    n, rank = M.shape
-    batch_matrix = torch.matmul(C.unsqueeze(2), C.unsqueeze(1)).unsqueeze(0)
-    CW = torch.matmul(C.unsqueeze(0), M.unsqueeze(2)).squeeze()
-    common = torch.exp(offsets + covariates @ beta + CW).unsqueeze(2).unsqueeze(3)
-    prod = batch_matrix * common
-    hess_posterior = torch.sum(prod, axis=1) + torch.eye(rank).to(DEVICE)
-    inv_hess_posterior = -torch.inverse(hess_posterior)
-    hess_posterior = torch.diagonal(inv_hess_posterior, dim1=-2, dim2=-1)
-    return hess_posterior
-
-
 def format_data(data):
     if isinstance(data, pd.DataFrame):
         return torch.from_numpy(data.values).double().to(DEVICE)
@@ -357,38 +329,48 @@ def format_data(data):
     if isinstance(data, torch.Tensor):
         return data
     raise AttributeError(
-        "Please insert either a numpy array, pandas.DataFrame or torch.tensor"
+        "Please insert either a numpy.ndarray, pandas.DataFrame or torch.Tensor"
     )
 
 
-def check_parameters_shape(counts, covariates, offsets):
-    n_counts, p_counts = counts.shape
-    n_offsets, p_offsets = offsets.shape
-    n_cov, _ = covariates.shape
-    check_dimensions_are_equal("counts", "offsets", n_counts, n_offsets, 0)
-    check_dimensions_are_equal("counts", "covariates", n_counts, n_cov, 0)
-    check_dimensions_are_equal("counts", "offsets", p_counts, p_offsets, 1)
+def format_model_param(counts, covariates, offsets, offsets_formula):
+    counts = format_data(counts)
+    covariates = prepare_covariates(covariates, counts.shape[0])
+    if offsets is None:
+        if offsets_formula == "logsum":
+            print("Setting the offsets as the log of the sum of counts")
+            offsets = (
+                torch.log(get_offsets_from_sum_of_counts(counts)).double().to(DEVICE)
+            )
+        else:
+            offsets = torch.zeros(counts.shape, device=DEVICE)
+    else:
+        offsets = format_data(offsets).to(DEVICE)
+    return counts, covariates, offsets
 
 
-def extract_data(dictionnary, parameter_in_string):
-    try:
-        return dictionnary[parameter_in_string]
-    except KeyError:
-        return None
+def prepare_covariates(covariates, n_samples):
+    full_of_ones = torch.full((n_samples, 1), 1, device=DEVICE).double()
+    if covariates is None:
+        return full_of_ones
+    covariates = format_data(covariates)
+    return torch.concat((full_of_ones, covariates), axis=1)
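+
+
+# Behavior sketch (illustrative): an intercept column of ones is always
+# prepended to the design matrix.
+#
+#     prepare_covariates(None, n_samples=3)        # shape (3, 1), all ones
+#     prepare_covariates(torch.randn(3, 2), 3)     # shape (3, 3)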
 
 
-def extract_cov_offsets_offsetsformula(dictionnary):
-    covariates = extract_data(dictionnary, "covariates")
-    offsets = extract_data(dictionnary, "offsets")
-    offsets_formula = extract_data(dictionnary, "offsets_formula")
-    return covariates, offsets, offsets_formula
+def check_data_shape(counts, covariates, offsets):
+    n_counts, p_counts = counts.shape
+    n_offsets, p_offsets = offsets.shape
+    n_cov, _ = covariates.shape
+    check_two_dimensions_are_equal("counts", "offsets", n_counts, n_offsets, 0)
+    check_two_dimensions_are_equal("counts", "covariates", n_counts, n_cov, 0)
+    check_two_dimensions_are_equal("counts", "offsets", p_counts, p_offsets, 1)
 
 
 def nice_string_of_dict(dictionary):
     return_string = ""
     for each_row in zip(*([i] + [j] for i, j in dictionary.items())):
         for element in list(each_row):
-            return_string += f"{str(element):>10}"
+            return_string += f"{str(element):>12}"
         return_string += "\n"
     return return_string
 
@@ -402,7 +384,7 @@ def plot_ellipse(mean_x, mean_y, cov, ax):
         width=ell_radius_x * 2,
         height=ell_radius_y * 2,
         linestyle="--",
-        alpha=0.1,
+        alpha=0.2,
     )
 
     scale_x = np.sqrt(cov[0, 0])
@@ -413,7 +395,92 @@ def plot_ellipse(mean_x, mean_y, cov, ax):
         .scale(scale_x, scale_y)
         .translate(mean_x, mean_y)
     )
-
     ellipse.set_transform(transf + ax.transData)
     ax.add_patch(ellipse)
     return pearson
+
+
+def get_components_simulation(dim, rank):
+    block_size = dim // rank
+    prev_state = torch.random.get_rng_state()
+    torch.random.manual_seed(0)
+    components = torch.zeros(dim, rank)
+    for column_number in range(rank):
+        components[
+            column_number * block_size : (column_number + 1) * block_size, column_number
+        ] = 1
+    components += torch.randn(dim, rank) / 8
+    torch.random.set_rng_state(prev_state)
+    return components.to(DEVICE)
+
+
+def get_simulation_offsets_cov_coef(n_samples, nb_cov, dim):
+    prev_state = torch.random.get_rng_state()
+    torch.random.manual_seed(0)
+    if nb_cov < 2:
+        covariates = None
+    else:
+        covariates = torch.randint(
+            low=-1,
+            high=2,
+            size=(n_samples, nb_cov - 1),
+            dtype=torch.float64,
+            device=DEVICE,
+        )
+    coef = torch.randn(nb_cov, dim, device=DEVICE)
+    offsets = torch.randint(
+        low=0, high=2, size=(n_samples, dim), dtype=torch.float64, device=DEVICE
+    )
+    torch.random.set_rng_state(prev_state)
+    return offsets, covariates, coef
+
+
+def get_simulated_count_data(
+    n_samples=100, dim=25, rank=5, nb_cov=1, return_true_param=False, seed=0
+):
+    components = get_components_simulation(dim, rank)
+    offsets, cov, true_coef = get_simulation_offsets_cov_coef(n_samples, nb_cov, dim)
+    true_covariance = torch.matmul(components, components.T)
+    counts, _, _ = sample_pln(components, true_coef, cov, offsets, seed=seed)
+    if return_true_param is True:
+        return counts, cov, offsets, true_covariance, true_coef
+    return counts, cov, offsets
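+
+
+# Usage sketch (illustrative): with the default nb_cov=1, covariates is None
+# and only the intercept is used.
+#
+#     counts, covariates, offsets = get_simulated_count_data(n_samples=100, dim=25)
+#     counts, covariates, offsets, covariance, coef = get_simulated_count_data(
+#         return_true_param=True
+#     )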
+
+
+def get_real_count_data(n_samples=270, dim=100):
+    if n_samples > 270:
+        warnings.warn(
+            f"\nTaking the whole 270 samples of the dataset. Requested: n_samples={n_samples}, returned: 270"
+        )
+        n_samples = 270
+    if dim > 100:
+        warnings.warn(
+            f"\nTaking the whole 100 variables. Requested: dim={dim}, returned: 100"
+        )
+        dim = 100
+    counts = pd.read_csv("../example_data/real_data/Y_mark.csv").values[
+        :n_samples, :dim
+    ]
+    print(f"Returning dataset of size {counts.shape}")
+    return counts
+
+
+def closest(lst, element):
+    lst = np.asarray(lst)
+    idx = (np.abs(lst - element)).argmin()
+    return lst[idx]
+
+
+def check_dimensions_are_equal(tens1, tens2):
+    if tens1.shape[0] != tens2.shape[0] or tens1.shape[1] != tens2.shape[1]:
+        raise ValueError("Tensors should have the same size.")
+
+
+def to_tensor(obj):
+    if isinstance(obj, np.ndarray):
+        return torch.from_numpy(obj)
+    if isinstance(obj, torch.Tensor):
+        return obj
+    if isinstance(obj, pd.DataFrame):
+        return torch.from_numpy(obj.values)
+    raise TypeError("Please give either a nd.array or torch.Tensor or pd.DataFrame")
diff --git a/example_data/real_data/oaks_counts.csv b/pyPLNmodels/data/oaks/counts.csv
similarity index 96%
rename from example_data/real_data/oaks_counts.csv
rename to pyPLNmodels/data/oaks/counts.csv
index 7a1d9331d84aaed408cb558610eec46253cdbea1..293b0e422e2910e1a339d59d6f7a78c025d1480a 100644
--- a/example_data/real_data/oaks_counts.csv
+++ b/pyPLNmodels/data/oaks/counts.csv
@@ -1,4 +1,4 @@
-"b_OTU_1045","b_OTU_109","b_OTU_1093","b_OTU_11","b_OTU_112","b_OTU_1191","b_OTU_1200","b_OTU_123","b_OTU_13","b_OTU_1431","b_OTU_153","b_OTU_17","b_OTU_171","b_OTU_18","b_OTU_182","b_OTU_20","b_OTU_21","b_OTU_22","b_OTU_23","b_OTU_235","b_OTU_24","b_OTU_25","b_OTU_26","b_OTU_27","b_OTU_29","b_OTU_304","b_OTU_31","b_OTU_329","b_OTU_33","b_OTU_34","b_OTU_35","b_OTU_36","b_OTU_364","b_OTU_37","b_OTU_39","b_OTU_41","b_OTU_42","b_OTU_44","b_OTU_443","b_OTU_444","b_OTU_447","b_OTU_46","b_OTU_47","b_OTU_48","b_OTU_49","b_OTU_51","b_OTU_548","b_OTU_55","b_OTU_56","b_OTU_57","b_OTU_58","b_OTU_59","b_OTU_60","b_OTU_625","b_OTU_63","b_OTU_662","b_OTU_69","b_OTU_72","b_OTU_73","b_OTU_74","b_OTU_76","b_OTU_8","b_OTU_81","b_OTU_87","b_OTU_90","b_OTU_98","f_OTU_1","f_OTU_2","f_OTU_3","f_OTU_4","f_OTU_5","f_OTU_6","f_OTU_7","f_OTU_8","f_OTU_9","f_OTU_10","f_OTU_12","f_OTU_13","f_OTU_15","f_OTU_17","f_OTU_19","f_OTU_20","f_OTU_23","f_OTU_24","f_OTU_25","f_OTU_26","f_OTU_27","f_OTU_28","f_OTU_29","f_OTU_30","f_OTU_32","f_OTU_33","f_OTU_39","f_OTU_40","f_OTU_43","f_OTU_46","f_OTU_57","f_OTU_63","f_OTU_65","f_OTU_68","f_OTU_79","f_OTU_317","f_OTU_576","f_OTU_579","f_OTU_662","f_OTU_672","f_OTU_1011","f_OTU_1085","f_OTU_1090","f_OTU_1141","f_OTU_1278","f_OTU_1567","f_OTU_1656","E_alphitoides"
+b_OTU_1045,b_OTU_109,b_OTU_1093,b_OTU_11,b_OTU_112,b_OTU_1191,b_OTU_1200,b_OTU_123,b_OTU_13,b_OTU_1431,b_OTU_153,b_OTU_17,b_OTU_171,b_OTU_18,b_OTU_182,b_OTU_20,b_OTU_21,b_OTU_22,b_OTU_23,b_OTU_235,b_OTU_24,b_OTU_25,b_OTU_26,b_OTU_27,b_OTU_29,b_OTU_304,b_OTU_31,b_OTU_329,b_OTU_33,b_OTU_34,b_OTU_35,b_OTU_36,b_OTU_364,b_OTU_37,b_OTU_39,b_OTU_41,b_OTU_42,b_OTU_44,b_OTU_443,b_OTU_444,b_OTU_447,b_OTU_46,b_OTU_47,b_OTU_48,b_OTU_49,b_OTU_51,b_OTU_548,b_OTU_55,b_OTU_56,b_OTU_57,b_OTU_58,b_OTU_59,b_OTU_60,b_OTU_625,b_OTU_63,b_OTU_662,b_OTU_69,b_OTU_72,b_OTU_73,b_OTU_74,b_OTU_76,b_OTU_8,b_OTU_81,b_OTU_87,b_OTU_90,b_OTU_98,f_OTU_1,f_OTU_2,f_OTU_3,f_OTU_4,f_OTU_5,f_OTU_6,f_OTU_7,f_OTU_8,f_OTU_9,f_OTU_10,f_OTU_12,f_OTU_13,f_OTU_15,f_OTU_17,f_OTU_19,f_OTU_20,f_OTU_23,f_OTU_24,f_OTU_25,f_OTU_26,f_OTU_27,f_OTU_28,f_OTU_29,f_OTU_30,f_OTU_32,f_OTU_33,f_OTU_39,f_OTU_40,f_OTU_43,f_OTU_46,f_OTU_57,f_OTU_63,f_OTU_65,f_OTU_68,f_OTU_79,f_OTU_317,f_OTU_576,f_OTU_579,f_OTU_662,f_OTU_672,f_OTU_1011,f_OTU_1085,f_OTU_1090,f_OTU_1141,f_OTU_1278,f_OTU_1567,f_OTU_1656,E_alphitoides
 0,0,0,6,146,1,6,6,68,0,41,33,0,322,0,5,468,0,16,6,1,0,2112,34,1,0,16,10,0,1669,4,3,19,28,1585,4,4,23,0,1,0,3,12,2,2,7,403,0,6,9,30,5,10,8,5,0,4,7,28,12,35,114,1,4,288,1,72,5,131,0,4,6,11,5,12,8,1181,21,514,11,6,26,4,4,0,9,2,0,2,3,0,3,10,0,0,8,1,1,1,10,0,0,4,0,8,4,89,17,0,6,106,2,3,0
 0,0,0,0,0,1,0,0,4,1,0,0,0,4,0,2,2,2,57,0,0,0,4,74,0,0,0,0,0,3,1,0,2,0,2,0,0,0,0,1,0,0,0,0,0,17,1,0,13,31,0,0,7,13,9,0,1,0,0,14,0,18,28,1,2,6,516,14,362,0,0,13,3,78,8,43,9,20,1,12,115,40,19,4,0,53,4,0,6,4,0,56,1,7,0,21,4,23,7,0,0,0,11,0,39,0,41,9,0,8,224,5,3,0
 0,0,0,2,0,0,0,0,128,0,1,1,0,2,0,0,3,0,10,0,0,0,2,51,1,1,1,1,0,1,3,0,5,2,5,2,1,0,0,0,0,0,1,0,0,3,1,0,6,25,2,0,2,4,3,0,0,0,0,5,2,27,4,1,3,0,305,24,238,0,1,37,5,50,20,75,1,28,2,6,26,58,16,17,0,54,2,2,1,2,0,20,0,19,11,63,0,12,12,2,0,0,19,0,13,8,137,36,0,24,295,9,5,0
diff --git a/pyPLNmodels/data/oaks/covariates.csv b/pyPLNmodels/data/oaks/covariates.csv
new file mode 100644
index 0000000000000000000000000000000000000000..0224d77c178e92f0a1794568fa78d901f3e38568
--- /dev/null
+++ b/pyPLNmodels/data/oaks/covariates.csv
@@ -0,0 +1,117 @@
+tree,distTOground,orientation
+intermediate,155.5,SW
+intermediate,144.5,SW
+intermediate,141.5,SW
+intermediate,134.5,SW
+intermediate,130.5,SW
+intermediate,129.5,SW
+intermediate,121.5,SW
+intermediate,111.5,SW
+intermediate,107.5,SW
+intermediate,212,SW
+intermediate,205,SW
+intermediate,198,SW
+intermediate,193,SW
+intermediate,190,SW
+intermediate,174,SW
+intermediate,171,SW
+intermediate,166,SW
+intermediate,156,SW
+intermediate,148,SW
+intermediate,245,NE
+intermediate,239,NE
+intermediate,226,NE
+intermediate,211,NE
+intermediate,201,NE
+intermediate,188,NE
+intermediate,176,NE
+intermediate,172,NE
+intermediate,166,NE
+intermediate,240,NE
+intermediate,237,NE
+intermediate,228,NE
+intermediate,221,NE
+intermediate,210,NE
+intermediate,204,NE
+intermediate,197,NE
+intermediate,194,NE
+intermediate,188,NE
+intermediate,183,NE
+susceptible,142,SW
+susceptible,141,SW
+susceptible,138,SW
+susceptible,135,SW
+susceptible,133,SW
+susceptible,131,SW
+susceptible,127,SW
+susceptible,118,SW
+susceptible,113,SW
+susceptible,105,SW
+susceptible,224,SW
+susceptible,226,SW
+susceptible,226,SW
+susceptible,222,SW
+susceptible,227,SW
+susceptible,219,SW
+susceptible,211,SW
+susceptible,206,SW
+susceptible,203,SW
+susceptible,151,SW
+susceptible,249,NE
+susceptible,236,NE
+susceptible,216,NE
+susceptible,208,NE
+susceptible,181,NE
+susceptible,175,NE
+susceptible,149,NE
+susceptible,140,NE
+susceptible,117,NE
+susceptible,272,NE
+susceptible,268,NE
+susceptible,264,NE
+susceptible,258,NE
+susceptible,254,NE
+susceptible,246,NE
+susceptible,242,NE
+susceptible,235,NE
+susceptible,228,NE
+susceptible,212,NE
+resistant,116,SW
+resistant,113,SW
+resistant,108,SW
+resistant,100,SW
+resistant,97,SW
+resistant,93,SW
+resistant,83,SW
+resistant,79,SW
+resistant,63,SW
+resistant,229,SW
+resistant,225,SW
+resistant,217,SW
+resistant,203,SW
+resistant,198,SW
+resistant,187,SW
+resistant,180,SW
+resistant,171,SW
+resistant,163,SW
+resistant,158,SW
+resistant,123,NE
+resistant,122,NE
+resistant,116,NE
+resistant,109,NE
+resistant,105,NE
+resistant,101,NE
+resistant,98,NE
+resistant,94,NE
+resistant,82,NE
+resistant,79,NE
+resistant,229,NE
+resistant,223,NE
+resistant,216,NE
+resistant,206,NE
+resistant,197,NE
+resistant,187,NE
+resistant,177,NE
+resistant,169,NE
+resistant,161,NE
+resistant,125,NE
diff --git a/example_data/real_data/oaks_offsets.csv b/pyPLNmodels/data/oaks/offsets.csv
similarity index 97%
rename from example_data/real_data/oaks_offsets.csv
rename to pyPLNmodels/data/oaks/offsets.csv
index 2cb12e38d161c12d5193730d2f5fbd6d7b5a92af..96a51bc54ba3ee345b4b0e3b2564aa2cd32e6bc9 100644
--- a/example_data/real_data/oaks_offsets.csv
+++ b/pyPLNmodels/data/oaks/offsets.csv
@@ -1,4 +1,4 @@
-"b_OTU_1045","b_OTU_109","b_OTU_1093","b_OTU_11","b_OTU_112","b_OTU_1191","b_OTU_1200","b_OTU_123","b_OTU_13","b_OTU_1431","b_OTU_153","b_OTU_17","b_OTU_171","b_OTU_18","b_OTU_182","b_OTU_20","b_OTU_21","b_OTU_22","b_OTU_23","b_OTU_235","b_OTU_24","b_OTU_25","b_OTU_26","b_OTU_27","b_OTU_29","b_OTU_304","b_OTU_31","b_OTU_329","b_OTU_33","b_OTU_34","b_OTU_35","b_OTU_36","b_OTU_364","b_OTU_37","b_OTU_39","b_OTU_41","b_OTU_42","b_OTU_44","b_OTU_443","b_OTU_444","b_OTU_447","b_OTU_46","b_OTU_47","b_OTU_48","b_OTU_49","b_OTU_51","b_OTU_548","b_OTU_55","b_OTU_56","b_OTU_57","b_OTU_58","b_OTU_59","b_OTU_60","b_OTU_625","b_OTU_63","b_OTU_662","b_OTU_69","b_OTU_72","b_OTU_73","b_OTU_74","b_OTU_76","b_OTU_8","b_OTU_81","b_OTU_87","b_OTU_90","b_OTU_98","f_OTU_1","f_OTU_2","f_OTU_3","f_OTU_4","f_OTU_5","f_OTU_6","f_OTU_7","f_OTU_8","f_OTU_9","f_OTU_10","f_OTU_12","f_OTU_13","f_OTU_15","f_OTU_17","f_OTU_19","f_OTU_20","f_OTU_23","f_OTU_24","f_OTU_25","f_OTU_26","f_OTU_27","f_OTU_28","f_OTU_29","f_OTU_30","f_OTU_32","f_OTU_33","f_OTU_39","f_OTU_40","f_OTU_43","f_OTU_46","f_OTU_57","f_OTU_63","f_OTU_65","f_OTU_68","f_OTU_79","f_OTU_317","f_OTU_576","f_OTU_579","f_OTU_662","f_OTU_672","f_OTU_1011","f_OTU_1085","f_OTU_1090","f_OTU_1141","f_OTU_1278","f_OTU_1567","f_OTU_1656","E_alphitoides"
+b_OTU_1045,b_OTU_109,b_OTU_1093,b_OTU_11,b_OTU_112,b_OTU_1191,b_OTU_1200,b_OTU_123,b_OTU_13,b_OTU_1431,b_OTU_153,b_OTU_17,b_OTU_171,b_OTU_18,b_OTU_182,b_OTU_20,b_OTU_21,b_OTU_22,b_OTU_23,b_OTU_235,b_OTU_24,b_OTU_25,b_OTU_26,b_OTU_27,b_OTU_29,b_OTU_304,b_OTU_31,b_OTU_329,b_OTU_33,b_OTU_34,b_OTU_35,b_OTU_36,b_OTU_364,b_OTU_37,b_OTU_39,b_OTU_41,b_OTU_42,b_OTU_44,b_OTU_443,b_OTU_444,b_OTU_447,b_OTU_46,b_OTU_47,b_OTU_48,b_OTU_49,b_OTU_51,b_OTU_548,b_OTU_55,b_OTU_56,b_OTU_57,b_OTU_58,b_OTU_59,b_OTU_60,b_OTU_625,b_OTU_63,b_OTU_662,b_OTU_69,b_OTU_72,b_OTU_73,b_OTU_74,b_OTU_76,b_OTU_8,b_OTU_81,b_OTU_87,b_OTU_90,b_OTU_98,f_OTU_1,f_OTU_2,f_OTU_3,f_OTU_4,f_OTU_5,f_OTU_6,f_OTU_7,f_OTU_8,f_OTU_9,f_OTU_10,f_OTU_12,f_OTU_13,f_OTU_15,f_OTU_17,f_OTU_19,f_OTU_20,f_OTU_23,f_OTU_24,f_OTU_25,f_OTU_26,f_OTU_27,f_OTU_28,f_OTU_29,f_OTU_30,f_OTU_32,f_OTU_33,f_OTU_39,f_OTU_40,f_OTU_43,f_OTU_46,f_OTU_57,f_OTU_63,f_OTU_65,f_OTU_68,f_OTU_79,f_OTU_317,f_OTU_576,f_OTU_579,f_OTU_662,f_OTU_672,f_OTU_1011,f_OTU_1085,f_OTU_1090,f_OTU_1141,f_OTU_1278,f_OTU_1567,f_OTU_1656,E_alphitoides
 8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,8315,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488,2488
 662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,662,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054,2054
 480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,480,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122,2122
diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py
index 81590770fb0c0da4640fa1d19492e4468e604980..160468cfc407dd5e271dde305a9db1aa02063fa6 100644
--- a/pyPLNmodels/elbos.py
+++ b/pyPLNmodels/elbos.py
@@ -1,9 +1,9 @@
-import torch
+import torch  # pylint:disable=[C0114]
 from ._utils import log_stirling, trunc_log
-from ._closed_forms import closed_formula_Sigma, closed_formula_beta
+from ._closed_forms import closed_formula_covariance, closed_formula_coef
 
 
-def ELBOPLN(counts, covariates, offsets, M, S, Sigma, beta):
+def elbo_pln(counts, covariates, offsets, latent_mean, latent_var, covariance, coef):
     """
     Compute the ELBO (Evidence LOwer Bound) for the PLN model. See the doc for more details
     on the computation.
@@ -12,66 +12,67 @@ def ELBOPLN(counts, covariates, offsets, M, S, Sigma, beta):
         counts: torch.tensor. Counts with size (n,p)
         offsets: torch.tensor. Offset, size (n,p)
         covariates: torch.tensor. Covariates, size (n,d)
-        M: torch.tensor. Variational parameter with size (n,p)
-        S: torch.tensor. Variational parameter with size (n,p)
-        Sigma: torch.tensor. Model parameter with size (p,p)
-        beta: torch.tensor. Model parameter with size (d,p)
+        latent_mean: torch.tensor. Variational parameter with size (n,p)
+        latent_var: torch.tensor. Variational parameter with size (n,p)
+        covariance: torch.tensor. Model parameter with size (p,p)
+        coef: torch.tensor. Model parameter with size (d,p)
     Returns:
         torch.tensor of size 1 with a gradient.
     """
-    n, p = counts.shape
-    SrondS = torch.multiply(S, S)
-    offsetsplusM = offsets + M
-    MmoinsXB = M - torch.mm(covariates, beta)
-    elbo = -n / 2 * torch.logdet(Sigma)
-    elbo += torch.sum(
-        torch.multiply(counts, offsetsplusM)
-        - torch.exp(offsetsplusM + SrondS / 2)
-        + 1 / 2 * torch.log(SrondS)
+    n_samples, dim = counts.shape
+    s_rond_s = torch.square(latent_var)
+    offsets_plus_m = offsets + latent_mean
+    m_minus_xb = latent_mean - covariates @ coef
+    d_plus_minus_xb2 = (
+        torch.diag(torch.sum(s_rond_s, dim=0)) + m_minus_xb.T @ m_minus_xb
     )
-    DplusMmoinsXB2 = torch.diag(torch.sum(SrondS, dim=0)) + torch.mm(
-        MmoinsXB.T, MmoinsXB
+    elbo = -0.5 * n_samples * torch.logdet(covariance)
+    elbo += torch.sum(
+        counts * offsets_plus_m
+        - torch.exp(offsets_plus_m + s_rond_s / 2)
+        + 0.5 * torch.log(s_rond_s)
     )
-    moinspsur2n = 1 / 2 * torch.trace(torch.mm(torch.inverse(Sigma), DplusMmoinsXB2))
-    elbo -= 1 / 2 * torch.trace(torch.mm(torch.inverse(Sigma), DplusMmoinsXB2))
+    elbo -= 0.5 * torch.trace(torch.inverse(covariance) @ d_plus_minus_xb2)
     elbo -= torch.sum(log_stirling(counts))
-    elbo += n * p / 2
-    return elbo
+    elbo += 0.5 * n_samples * dim
+    return elbo / n_samples
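+
+
+# Shape sketch (illustrative): with n=100 samples, p=25 variables and d=2
+# covariates (intercept included), the result is a scalar with a gradient.
+#
+#     elbo = elbo_pln(
+#         counts,       # (100, 25)
+#         covariates,   # (100, 2)
+#         offsets,      # (100, 25)
+#         latent_mean,  # (100, 25)
+#         latent_var,   # (100, 25)
+#         covariance,   # (25, 25), positive definite
+#         coef,         # (2, 25)
+#     )
+#     (-elbo).backward()  # maximize the ELBO by minimizing its opposite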
 
 
-def profiledELBOPLN(counts, covariates, offsets, M, S):
+def profiled_elbo_pln(counts, covariates, offsets, latent_mean, latent_var):
     """
-    Compute the ELBO (Evidence LOwer Bound) for the PLN model. We use the fact that Sigma and beta are
-    completely determined by M,S, and the covariates. See the doc for more details
+    Compute the ELBO (Evidence LOwer Bound) for the PLN model. We use the fact that covariance and coef are
+    completely determined by latent_mean, latent_var and the covariates. See the doc for more details
     on the computation.
 
     Args:
         counts: torch.tensor. Counts with size (n,p)
         offsets: torch.tensor. Offset, size (n,p)
         covariates: torch.tensor. Covariates, size (n,d)
-        M: torch.tensor. Variational parameter with size (n,p)
-        S: torch.tensor. Variational parameter with size (n,p)
-        Sigma: torch.tensor. Model parameter with size (p,p)
-        beta: torch.tensor. Model parameter with size (d,p)
+        latent_mean: torch.tensor. Variational parameter with size (n,p)
+        latent_var: torch.tensor. Variational parameter with size (n,p)
     Returns:
         torch.tensor of size 1 with a gradient.
     """
-    n, p = counts.shape
-    SrondS = torch.multiply(S, S)
-    offsetsplusM = offsets + M
-    closed_beta = closed_formula_beta(covariates, M)
-    closed_Sigma = closed_formula_Sigma(covariates, M, S, closed_beta, n)
-    elbo = -n / 2 * torch.logdet(closed_Sigma)
+    n_samples, _ = counts.shape
+    s_rond_s = torch.square(latent_var)
+    offsets_plus_m = offsets + latent_mean
+    closed_coef = closed_formula_coef(covariates, latent_mean)
+    closed_covariance = closed_formula_covariance(
+        covariates, latent_mean, latent_var, closed_coef, n_samples
+    )
+    elbo = -0.5 * n_samples * torch.logdet(closed_covariance)
     elbo += torch.sum(
-        torch.multiply(counts, offsetsplusM)
-        - torch.exp(offsetsplusM + SrondS / 2)
-        + 1 / 2 * torch.log(SrondS)
+        counts * offsets_plus_m
+        - torch.exp(offsets_plus_m + s_rond_s / 2)
+        + 0.5 * torch.log(s_rond_s)
     )
     elbo -= torch.sum(log_stirling(counts))
-    return elbo
+    return elbo / n_samples
 
 
-def ELBOPLNPCA(counts, covariates, offsets, M, S, C, beta):
+def elbo_plnpca(counts, covariates, offsets, latent_mean, latent_var, components, coef):
     """
     Compute the ELBO (Evidence LOwer Bound) for the PLN model with a PCA
     parametrization. See the doc for more details on the computation.
@@ -80,36 +81,49 @@ def ELBOPLNPCA(counts, covariates, offsets, M, S, C, beta):
         counts: torch.tensor. Counts with size (n,p)
         offsets: torch.tensor. Offset, size (n,p)
         covariates: torch.tensor. Covariates, size (n,d)
-        M: torch.tensor. Variational parameter with size (n,p)
-        S: torch.tensor. Variational parameter with size (n,p)
-        C: torch.tensor. Model parameter with size (p,q)
-        beta: torch.tensor. Model parameter with size (d,p)
+        latent_mean: torch.tensor. Variational parameter with size (n,p)
+        latent_var: torch.tensor. Variational parameter with size (n,p)
+        components: torch.tensor. Model parameter with size (p,q)
+        coef: torch.tensor. Model parameter with size (d,p)
     Returns:
         torch.tensor of size 1 with a gradient.
     """
-    n = counts.shape[0]
-    rank = C.shape[1]
-    A = offsets + torch.mm(covariates, beta) + torch.mm(M, C.T)
-    SrondS = torch.multiply(S, S)
-    countsA = torch.sum(torch.multiply(counts, A))
-    moinsexpAplusSrondSCCT = torch.sum(
-        -torch.exp(A + 1 / 2 * torch.mm(SrondS, torch.multiply(C, C).T))
+    n_samples = counts.shape[0]
+    rank = components.shape[1]
+    log_intensity = offsets + covariates @ coef + latent_mean @ components.T
+    s_rond_s = torch.square(latent_var)
+    counts_log_intensity = torch.sum(counts * log_intensity)
+    minus_intensity_plus_s_rond_s_cct = torch.sum(
+        -torch.exp(log_intensity + 0.5 * s_rond_s @ (components * components).T)
+    )
+    minuslogs_rond_s = 0.5 * torch.sum(torch.log(s_rond_s))
+    mm_plus_s_rond_s = -0.5 * torch.sum(
+        torch.square(latent_mean) + torch.square(latent_var)
     )
-    moinslogSrondS = 1 / 2 * torch.sum(torch.log(SrondS))
-    MMplusSrondS = torch.sum(-1 / 2 * (torch.multiply(M, M) + torch.multiply(S, S)))
     log_stirlingcounts = torch.sum(log_stirling(counts))
     return (
-        countsA
-        + moinsexpAplusSrondSCCT
-        + moinslogSrondS
-        + MMplusSrondS
+        counts_log_intensity
+        + minus_intensity_plus_s_rond_s_cct
+        + minuslogs_rond_s
+        + mm_plus_s_rond_s
         - log_stirlingcounts
-        + n * rank / 2
-    )
+        + 0.5 * n_samples * rank
+    ) / n_samples
 
 
 ## should rename some variables so that it is clearer when we see the formula
-def ELBOZIPLN(counts, covariates, offsets, M, S, pi, Sigma, beta, B_zero, dirac):
+def elbo_zi_pln(
+    counts,
+    covariates,
+    offsets,
+    latent_mean,
+    latent_var,
+    pi,
+    covariance,
+    coef,
+    _coef_inflation,
+    dirac,
+):
     """Compute the ELBO (Evidence LOwer Bound) for the Zero Inflated PLN model.
     See the doc for more details on the computation.
 
@@ -117,50 +131,46 @@ def ELBOZIPLN(counts, covariates, offsets, M, S, pi, Sigma, beta, B_zero, dirac)
         counts: torch.tensor. Counts with size (n,p)
         offsets: torch.tensor. Offset, size (n,p)
         covariates: torch.tensor. Covariates, size (n,d)
-        M: torch.tensor. Variational parameter with size (n,p)
-        S: torch.tensor. Variational parameter with size (n,p)
+        latent_mean: torch.tensor. Variational parameter with size (n,p)
+        latent_var: torch.tensor. Variational parameter with size (n,p)
         pi: torch.tensor. Variational parameter with size (n,p)
-        Sigma: torch.tensor. Model parameter with size (p,p)
-        beta: torch.tensor. Model parameter with size (d,p)
-        B_zero: torch.tensor. Model parameter with size (d,p)
+        covariance: torch.tensor. Model parameter with size (p,p)
+        coef: torch.tensor. Model parameter with size (d,p)
+        _coef_inflation: torch.tensor. Model parameter with size (d,p)
     Returns:
         torch.tensor of size 1 with a gradient.
     """
     if torch.norm(pi * dirac - pi) > 0.0001:
         print("Bug")
         return False
-    n = counts.shape[0]
-    p = counts.shape[1]
-    SrondS = torch.multiply(S, S)
-    offsetsplusM = offsets + M
-    MmoinsXB = M - torch.mm(covariates, beta)
-    XB_zero = torch.mm(covariates, B_zero)
+    n_samples = counts.shape[0]
+    dim = counts.shape[1]
+    s_rond_s = torch.square(latent_var)
+    offsets_plus_m = offsets + latent_mean
+    m_minus_xb = latent_mean - covariates @ coef
+    x_coef_inflation = covariates @ _coef_inflation
     elbo = torch.sum(
-        torch.multiply(
-            1 - pi,
-            torch.multiply(counts, offsetsplusM)
-            - torch.exp(offsetsplusM + SrondS / 2)
+        (1 - pi)
+        * (
+            counts * offsets_plus_m
+            - torch.exp(offsets_plus_m + s_rond_s / 2)
-            - log_stirling(counts),
+            - log_stirling(counts)
         )
         + pi
     )
 
-    elbo -= torch.sum(
-        torch.multiply(pi, trunc_log(pi)) + torch.multiply(1 - pi, trunc_log(1 - pi))
+    elbo -= torch.sum(pi * trunc_log(pi) + (1 - pi) * trunc_log(1 - pi))
+    elbo += torch.sum(
+        pi * x_coef_inflation - torch.log(1 + torch.exp(x_coef_inflation))
     )
-    elbo += torch.sum(torch.multiply(pi, XB_zero) - torch.log(1 + torch.exp(XB_zero)))
 
-    elbo -= (
-        1
-        / 2
-        * torch.trace(
-            torch.mm(
-                torch.inverse(Sigma),
-                torch.diag(torch.sum(SrondS, dim=0)) + torch.mm(MmoinsXB.T, MmoinsXB),
-            )
+    elbo -= 0.5 * torch.trace(
+        torch.mm(
+            torch.inverse(covariance),
+            torch.diag(torch.sum(s_rond_s, dim=0)) + m_minus_xb.T @ m_minus_xb,
         )
     )
-    elbo += n / 2 * torch.log(torch.det(Sigma))
-    elbo += n * p / 2
-    elbo += torch.sum(1 / 2 * torch.log(SrondS))
+    elbo += 0.5 * n_samples * torch.log(torch.det(covariance))
+    elbo += 0.5 * n_samples * dim
+    elbo += 0.5 * torch.sum(torch.log(s_rond_s))
     return elbo
diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4171d6aab7437a1cb56e7159090803ae32e990d
--- /dev/null
+++ b/pyPLNmodels/models.py
@@ -0,0 +1,998 @@
+import time
+from abc import ABC, abstractmethod
+import pickle
+import warnings
+import os
+
+import pandas as pd
+import torch
+import numpy as np
+import seaborn as sns
+import matplotlib.pyplot as plt
+from sklearn.decomposition import PCA
+
+
+from ._closed_forms import (
+    closed_formula_coef,
+    closed_formula_covariance,
+    closed_formula_pi,
+)
+from .elbos import elbo_plnpca, elbo_zi_pln, profiled_elbo_pln
+from ._utils import (
+    PLNPlotArgs,
+    init_sigma,
+    init_components,
+    init_coef,
+    check_two_dimensions_are_equal,
+    init_latent_mean,
+    format_data,
+    format_model_param,
+    check_data_shape,
+    nice_string_of_dict,
+    plot_ellipse,
+    closest,
+    prepare_covariates,
+    to_tensor,
+    check_dimensions_are_equal,
+)
+
+if torch.cuda.is_available():
+    DEVICE = "cuda"
+    print("Using a GPU")
+else:
+    DEVICE = "cpu"
+# should add a good init for latent_mean. For PLN we should not put
+# the maximum of the log posterior; for PLNPCA it may be ok.
+
+NB_CHARACTERS_FOR_NICE_PLOT = 70
+
+
+class _PLN(ABC):
+    """
+    Virtual class for all the PLN models.
+
+    This class must be subclassed. The methods `get_covariance`, `compute_elbo`,
+    `random_init_latent_parameters` and `list_of_parameters_needing_gradient` must
+    be defined.
+    """
+
+    WINDOW = 15
+    n_samples: int
+    dim: int
+    nb_cov: int
+    _counts: torch.Tensor
+    _covariates: torch.Tensor
+    _offsets: torch.Tensor
+    _coef: torch.Tensor
+    beginnning_time: float
+    _latent_var: torch.Tensor
+    _latent_mean: torch.Tensor
+
+    def __init__(self):
+        """
+        Simple initialization method.
+        """
+        self._fitted = False
+        self.plotargs = PLNPlotArgs(self.WINDOW)
+
+    def format_model_param(self, counts, covariates, offsets, offsets_formula):
+        self._counts, self._covariates, self._offsets = format_model_param(
+            counts, covariates, offsets, offsets_formula
+        )
+
+    @property
+    def nb_iteration_done(self):
+        return len(self.plotargs.elbos_list)
+
+    @property
+    def n_samples(self):
+        return self._counts.shape[0]
+
+    @property
+    def dim(self):
+        return self._counts.shape[1]
+
+    @property
+    def nb_cov(self):
+        return self.covariates.shape[1]
+
+    def smart_init_coef(self):
+        self._coef = init_coef(self._counts, self._covariates)
+
+    def random_init_coef(self):
+        self._coef = torch.randn((self.nb_cov, self.dim), device=DEVICE)
+
+    @abstractmethod
+    def random_init_model_parameters(self):
+        pass
+
+    @abstractmethod
+    def smart_init_model_parameters(self):
+        pass
+
+    @abstractmethod
+    def random_init_latent_parameters(self):
+        pass
+
+    def smart_init_latent_parameters(self):
+        pass
+
+    def init_parameters(self, do_smart_init):
+        print("Initialization ...")
+        if do_smart_init:
+            self.smart_init_model_parameters()
+            self.smart_init_latent_parameters()
+        else:
+            self.random_init_model_parameters()
+            self.random_init_latent_parameters()
+        print("Initialization finished")
+        self.put_parameters_to_device()
+
+    def put_parameters_to_device(self):
+        for parameter in self.list_of_parameters_needing_gradient:
+            parameter.requires_grad_(True)
+
+    @property
+    def list_of_parameters_needing_gradient(self):
+        """
+        A list containing all the parameters that need to be updated via a gradient step.
+        """
+
+    def fit(
+        self,
+        counts,
+        covariates=None,
+        offsets=None,
+        nb_max_iteration=50000,
+        lr=0.01,
+        class_optimizer=torch.optim.Rprop,
+        tol=1e-6,
+        do_smart_init=True,
+        verbose=False,
+        offsets_formula="logsum",
+        keep_going=False,
+    ):
+        """
+        Main function of the class. Fit a PLN to the data.
+        Parameters
+        ----------
+        counts : torch.tensor or ndarray or DataFrame.
+            2-d count data.
+        covariates : torch.tensor or ndarray or DataFrame or
+            None, default = None
+            If not `None`, the first dimension should equal the first
+            dimension of `counts`.
+        offsets : torch.tensor or ndarray or DataFrame or None, default = None
+            Model offset. If not `None`, size should be the same as `counts`.
+        """
+        self.print_beginning_message()
+        self.beginnning_time = time.time()
+
+        if keep_going is False:
+            self.format_model_param(counts, covariates, offsets, offsets_formula)
+            check_data_shape(self._counts, self._covariates, self._offsets)
+            self.init_parameters(do_smart_init)
+        if self._fitted is True and keep_going is True:
+            self.beginnning_time -= self.plotargs.running_times[-1]
+        self.optim = class_optimizer(self.list_of_parameters_needing_gradient, lr=lr)
+        stop_condition = False
+        while self.nb_iteration_done < nb_max_iteration and not stop_condition:
+            loss = self.trainstep()
+            criterion = self.compute_criterion_and_update_plotargs(loss, tol)
+            if abs(criterion) < tol:
+                stop_condition = True
+            if verbose and self.nb_iteration_done % 50 == 0:
+                self.print_stats()
+        self.print_end_of_fitting_message(stop_condition, tol)
+        self._fitted = True
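+
+    # Usage sketch for a concrete subclass (illustrative; assumes the PLN class
+    # exported by this package and the simulated-data helper from _utils):
+    #
+    #     from pyPLNmodels import PLN, get_simulated_count_data
+    #     counts, covariates, offsets = get_simulated_count_data()
+    #     pln = PLN()
+    #     pln.fit(counts, covariates, offsets)
+    #     print(pln)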
+
+    def trainstep(self):
+        """
+        simple docstrings with black errors
+        """
+        self.optim.zero_grad()
+        loss = -self.compute_elbo()
+        loss.backward()
+        self.optim.step()
+        self.update_closed_forms()
+        return loss
+
+    def pca_projected_latent_variables(self, n_components=None):
+        if n_components is None:
+            n_components = self.get_max_components()
+        if n_components > self.dim:
+            raise RuntimeError(
+                f"You ask more components ({n_components}) than variables ({self.dim})"
+            )
+        pca = PCA(n_components=n_components)
+        return pca.fit_transform(self.latent_variables.detach().cpu())
+
+    @property
+    @abstractmethod
+    def latent_variables(self):
+        pass
+
+    def print_end_of_fitting_message(self, stop_condition, tol):
+        if stop_condition is True:
+            print(
+                f"Tolerance {tol} reached"
+                f"n {self.plotargs.iteration_number} iterations"
+            )
+        else:
+            print(
+                "Maximum number of iterations reached : ",
+                self.plotargs.iteration_number,
+                "last criterion = ",
+                np.round(self.plotargs.criterions[-1], 8),
+            )
+
+    def print_stats(self):
+        print("-------UPDATE-------")
+        print("Iteration number: ", self.plotargs.iteration_number)
+        print("Criterion: ", np.round(self.plotargs.criterions[-1], 8))
+        print("ELBO:", np.round(self.plotargs.elbos_list[-1], 6))
+
+    def compute_criterion_and_update_plotargs(self, loss, tol):
+        self.plotargs.elbos_list.append(-loss.item())
+        self.plotargs.running_times.append(time.time() - self.beginnning_time)
+        if self.plotargs.iteration_number > self.WINDOW:
+            criterion = abs(
+                self.plotargs.elbos_list[-1]
+                - self.plotargs.elbos_list[-1 - self.WINDOW]
+            )
+            self.plotargs.criterions.append(criterion)
+            return criterion
+        return tol
+
+    def update_closed_forms(self):
+        pass
+
+    @abstractmethod
+    def compute_elbo(self):
+        """
+        Compute the Evidence Lower BOund (ELBO) that will be maximized
+        by pytorch.
+        """
+
+    def display_covariance(self, ax=None, savefig=False, name_file=""):
+        """
+        Display a heatmap of covariance to visualize correlations.
+
+        If covariance is too big (size is > 400), will only display the
+        first block of size (400,400).
+
+        Parameters
+        ----------
+        ax : matplotlib Axes, optional
+            Axes in which to draw the plot, otherwise use the
+            currently-active Axes.
+        savefig: bool, optional
+            If True the figure will be saved. Default is False.
+        name_file : str, optional
+            The name of the file the graphic will be saved to if saved.
+            Default is an empty string.
+        """
+        if self.dim > 400:
+            warnings.warn("Only displaying the first 400 variables.")
+            sns.heatmap(self.covariance[:400, :400], ax=ax)
+        else:
+            sns.heatmap(self.covariance, ax=ax)
+        if savefig:
+            plt.savefig(name_file + self.NAME)
+        plt.show()  # to avoid displaying a blank screen
+
+    def __str__(self):
+        delimiter = "=" * NB_CHARACTERS_FOR_NICE_PLOT
+        string = f"A multivariate Poisson Lognormal with {self.description} \n"
+        string += f"{delimiter}\n"
+        string += nice_string_of_dict(self.dict_for_printing)
+        string += f"{delimiter}\n"
+        string += "* Useful properties\n"
+        string += f"    {self.useful_properties_string}\n"
+        string += "* Useful methods\n"
+        string += f"    {self.useful_methods_string}\n"
+        string += f"* Additional properties for {self.NAME}\n"
+        string += f"    {self.additional_properties_string}\n"
+        string += f"* Additionial methods for {self.NAME}\n"
+        string += f"    {self.additional_methods_string}"
+        return string
+
+    @property
+    def additional_methods_string(self):
+        pass
+
+    @property
+    def additional_properties_string(self):
+        pass
+
+    def show(self, axes=None):
+        print("Likelihood:", -self.loglike)
+        if self._fitted is False:
+            nb_axes = 1
+        else:
+            nb_axes = 3
+        if axes is None:
+            _, axes = plt.subplots(1, nb_axes, figsize=(23, 5))
+        if self._fitted is True:
+            self.plotargs.show_loss(ax=axes[2])
+            self.plotargs.show_stopping_criterion(ax=axes[1])
+            self.display_covariance(ax=axes[0])
+        else:
+            self.display_covariance(ax=axes)
+        plt.show()
+
+    @property
+    def elbos_list(self):
+        return self.plotargs.elbos_list
+
+    @property
+    def loglike(self):
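+        """Log-likelihood approximated by the ELBO: n_samples times the last
+        per-sample ELBO value (a lower bound on the true log-likelihood)."""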
+        if self._fitted is False:
+            t0 = time.time()
+            self.plotargs.elbos_list.append(self.compute_elbo().item())
+            self.plotargs.running_times.append(time.time() - t0)
+        return self.n_samples * self.elbos_list[-1]
+
+    @property
+    def BIC(self):
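+        # BIC-type criterion (up to the usual factor 2) based on the ELBO
+        # approximation of the log-likelihood: -loglike + (k / 2) * log(n).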
+        return -self.loglike + self.number_of_parameters / 2 * np.log(self.n_samples)
+
+    @property
+    def AIC(self):
+        return -self.loglike + self.number_of_parameters
+
+    @property
+    def latent_parameters(self):
+        return {"latent_var": self.latent_var, "latent_mean": self.latent_mean}
+
+    @property
+    def model_parameters(self):
+        return {"coef": self.coef, "covariance": self.covariance}
+
+    @property
+    def dict_data(self):
+        return {
+            "counts": self.counts,
+            "covariates": self.covariates,
+            "offsets": self.offsets,
+        }
+
+    @property
+    def model_in_a_dict(self):
+        return self.dict_data | self.model_parameters | self.latent_parameters
+
+    @property
+    def coef(self):
+        return self.attribute_or_none("_coef")
+
+    @property
+    def latent_mean(self):
+        return self.attribute_or_none("_latent_mean")
+
+    @property
+    def latent_var(self):
+        return self.attribute_or_none("_latent_var")
+
+    @latent_var.setter
+    def latent_var(self, latent_var):
+        self._latent_var = latent_var
+
+    @latent_mean.setter
+    def latent_mean(self, latent_mean):
+        self._latent_mean = latent_mean
+
+    def attribute_or_none(self, attribute_name):
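+        """Return the attribute if it exists (detached and moved to CPU when it
+        is a torch.Tensor), otherwise None."""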
+        if hasattr(self, attribute_name):
+            attr = getattr(self, attribute_name)
+            if isinstance(attr, torch.Tensor):
+                return attr.detach().cpu()
+            return attr
+        return None
+
+    def save(self, path_of_directory="./"):
+        path = f"{path_of_directory}/{self.model_path}/"
+        os.makedirs(path, exist_ok=True)
+        for key, value in self.model_in_a_dict.items():
+            filename = f"{path}/{key}.csv"
+            if isinstance(value, torch.Tensor):
+                pd.DataFrame(np.array(value.cpu().detach())).to_csv(
+                    filename, header=None, index=None
+                )
+            else:
+                pd.DataFrame(np.array([value])).to_csv(
+                    filename, header=None, index=None
+                )
+        self._fitted = True
+
+    def load(self, path_of_directory="./"):
+        path = f"{path_of_directory}/{self.model_path}/"
+        for key in self.model_in_a_dict.keys():
+            value = torch.from_numpy(
+                pd.read_csv(path + key + ".csv", header=None).values
+            )
+            setattr(self, key, value)
+        self.put_parameters_to_device()
+
+    @property
+    def counts(self):
+        return self.attribute_or_none("_counts")
+
+    @property
+    def offsets(self):
+        return self.attribute_or_none("_offsets")
+
+    @property
+    def covariates(self):
+        return self.attribute_or_none("_covariates")
+
+    @counts.setter
+    def counts(self, counts):
+        counts = to_tensor(counts)
+        if hasattr(self, "_counts"):
+            check_dimensions_are_equal(self._counts, counts)
+        self._counts = counts
+
+    @offsets.setter
+    def offsets(self, offsets):
+        self._offsets = offsets
+
+    @covariates.setter
+    def covariates(self, covariates):
+        self._covariates = covariates
+
+    @coef.setter
+    def coef(self, coef):
+        self._coef = coef
+
+    @property
+    def dict_for_printing(self):
+        return {
+            "Loglike": np.round(self.loglike, 2),
+            "Dimension": self.dim,
+            "Nb param": int(self.number_of_parameters),
+            "BIC": int(self.BIC),
+            "AIC": int(self.AIC),
+        }
+
+    @property
+    def optim_parameters(self):
+        return {"Number of iterations done": self.nb_iteration_done}
+
+    @property
+    def useful_properties_string(self):
+        return ".latent_variables, .model_parameters, .latent_parameters, \
+.optim_parameters"
+
+    @property
+    def useful_methods_string(self):
+        return ".show(), .coef() .transform(), .sigma(), .predict(), \
+.pca_projected_latent_variables()"
+
+    def sigma(self):
+        return self.covariance
+
+    def predict(self, covariates=None):
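+        """Return the linear predictor covariates @ coef, after prepending a
+        column of ones (intercept) to the covariates."""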
+        if isinstance(covariates, torch.Tensor):
+            if covariates.shape[-1] != self.nb_cov - 1:
+                error_string = f"X has wrong shape ({covariates.shape}).Should"
+                error_string += f" be ({self.n_samples, self.nb_cov-1})."
+                raise RuntimeError(error_string)
+        covariates_with_ones = prepare_covariates(covariates, self.n_samples)
+        return covariates_with_ones @ self.coef
+
+
+# TODO: find a good initialization for the latent mean M and the latent variance S
+class PLN(_PLN):
+    NAME = "PLN"
+    coef: torch.Tensor
+
+    @property
+    def description(self):
+        return "full covariance model."
+
+    @property
+    def coef(self):
+        if hasattr(self, "_latent_mean") and hasattr(self, "_covariates"):
+            return self._coef
+        return None
+
+    @coef.setter
+    def coef(self, coef):
+        pass
+
+    def smart_init_latent_parameters(self):
+        self.random_init_latent_parameters()
+
+    def random_init_latent_parameters(self):
+        self._latent_var = 1 / 2 * torch.ones((self.n_samples, self.dim)).to(DEVICE)
+        self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE)
+
+    @property
+    def model_path(self):
+        return self.NAME
+
+    @property
+    def list_of_parameters_needing_gradient(self):
+        return [self._latent_mean, self._latent_var]
+
+    def get_max_components(self):
+        return self.dim
+
+    def compute_elbo(self):
+        """
+        Compute the Evidence Lower BOund (ELBO) that will be
+        maximized by pytorch. Here we use the profiled ELBO
+        for the full covariance matrix.
+        """
+        return profiled_elbo_pln(
+            self._counts,
+            self._covariates,
+            self._offsets,
+            self._latent_mean,
+            self._latent_var,
+        )
+
+    def smart_init_model_parameters(self):
+        # no model parameters since we are doing a profiled ELBO
+        pass
+
+    def random_init_model_parameters(self):
+        # no model parameters since we are doing a profiled ELBO
+        pass
+
+    @property
+    def _coef(self):
+        return closed_formula_coef(self._covariates, self._latent_mean)
+
+    @property
+    def _covariance(self):
+        return closed_formula_covariance(
+            self._covariates,
+            self._latent_mean,
+            self._latent_var,
+            self._coef,
+            self.n_samples,
+        )
+
+    def print_beginning_message(self):
+        print(f"Fitting a PLN model with {self.description}")
+
+    @property
+    def latent_variables(self):
+        return self.latent_mean
+
+    @property
+    def number_of_parameters(self):
+        return self.dim * (self.dim + self.nb_cov)
+
+    def transform(self):
+        return self.latent_variables
+
+    @property
+    def covariance(self):
+        if all(
+            hasattr(self, attr)
+            for attr in [
+                "_covariates",
+                "_latent_mean",
+                "_latent_var",
+                "_coef",
+                "n_samples",
+            ]
+        ):
+            return self._covariance.detach()
+        return None
+
+    @covariance.setter
+    def covariance(self, covariance):
+        pass
+
+
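+# A minimal usage sketch of the PLN class (illustrative only; `counts` stands
+# for any n_samples x dim array of counts):
+#
+#   pln = PLN()
+#   pln.fit(counts)
+#   print(pln)                # summary with loglike, BIC and AIC
+#   latent = pln.transform()  # n_samples x dim latent positions
+
+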
+class PLNPCA:
+    def __init__(self, ranks):
+        if isinstance(ranks, (list, np.ndarray)):
+            self.ranks = ranks
+            self.dict_models = {}
+            for rank in ranks:
+                if isinstance(rank, (int, np.int64)):
+                    self.dict_models[rank] = _PLNPCA(rank)
+                else:
+                    raise TypeError(
+                        "Please instantiate with either a list "
+                        "of integers or an integer."
+                    )
+        elif isinstance(ranks, int):
+            self.ranks = [ranks]
+            self.dict_models = {ranks: _PLNPCA(ranks)}
+        else:
+            raise TypeError(
+                "Please instantiate with either a list "
+                "of integers or an integer."
+            )
+
+    @property
+    def models(self):
+        return list(self.dict_models.values())
+
+    def print_beginning_message(self):
+        return f"Adjusting {len(self.ranks)} PLN models for PCA analysis \n"
+
+    @property
+    def dim(self):
+        return self[self.ranks[0]].dim
+
+    ## TODO: rework this awkward init. Problem: if self._counts etc. are initialized
+    ## only in PLNPCA, the init is not done for each _PLNPCA; but moving it to
+    ## _PLNPCA means PLN would not do it.
+    def fit(
+        self,
+        counts,
+        covariates=None,
+        offsets=None,
+        nb_max_iteration=100000,
+        lr=0.01,
+        class_optimizer=torch.optim.Rprop,
+        tol=1e-6,
+        do_smart_init=True,
+        verbose=False,
+        offsets_formula="logsum",
+        keep_going=False,
+    ):
+        self.print_beginning_message()
+        counts, _, offsets = format_model_param(
+            counts, covariates, offsets, offsets_formula
+        )
+        for pca in self.dict_models.values():
+            pca.fit(
+                counts,
+                covariates,
+                offsets,
+                nb_max_iteration,
+                lr,
+                class_optimizer,
+                tol,
+                do_smart_init,
+                verbose,
+                None,
+                keep_going,
+            )
+        self.print_ending_message()
+
+    def print_ending_message(self):
+        delimiter = "=" * NB_CHARACTERS_FOR_NICE_PLOT
+        print(f"{delimiter}\n")
+        print("DONE!")
+        print(f"    Best model(lower BIC): {self.criterion_dict('BIC')}\n ")
+        print(f"    Best model(lower AIC): {self.criterion_dict('AIC')}\n ")
+        print(f"{delimiter}\n")
+
+    def criterion_dict(self, criterion="AIC"):
+        return self.best_model(criterion).rank
+
+    def __getitem__(self, rank):
+        if rank not in self.ranks:
+            asked_rank = rank
+            rank = closest(self.ranks, asked_rank)
+            warning_string = "\nNo such model in the collection. "
+            warning_string += "Returning the model with the closest rank.\n"
+            warning_string += f"Requested: {asked_rank}, returned: {rank}"
+            warnings.warn(message=warning_string)
+        return self.dict_models[rank]
+
+    @property
+    def BIC(self):
+        return {model.rank: int(model.BIC) for model in self.models}
+
+    @property
+    def AIC(self):
+        return {model.rank: int(model.AIC) for model in self.models}
+
+    @property
+    def loglikes(self):
+        return {model.rank: model.loglike for model in self.models}
+
+    def show(self):
+        bic = self.BIC
+        aic = self.AIC
+        loglikes = self.loglikes
+        bic_color = "blue"
+        aic_color = "red"
+        loglikes_color = "orange"
+        plt.scatter(bic.keys(), bic.values(), label="BIC criterion", c=bic_color)
+        plt.plot(bic.keys(), bic.values(), c=bic_color)
+        plt.axvline(self.best_BIC_model_rank, c=bic_color, linestyle="dotted")
+        plt.scatter(aic.keys(), aic.values(), label="AIC criterion", c=aic_color)
+        plt.axvline(self.best_AIC_model_rank, c=aic_color, linestyle="dotted")
+        plt.plot(aic.keys(), aic.values(), c=aic_color)
+        plt.xticks(list(aic.keys()))
+        plt.scatter(
+            loglikes.keys(),
+            -np.array(list(loglikes.values())),
+            label="Negative log likelihood",
+            c=loglikes_color,
+        )
+        plt.plot(loglikes.keys(), -np.array(list(loglikes.values())), c=loglikes_color)
+        plt.legend()
+        plt.show()
+
+    @property
+    def best_BIC_model_rank(self):
+        return self.ranks[np.argmin(list(self.BIC.values()))]
+
+    @property
+    def best_AIC_model_rank(self):
+        return self.ranks[np.argmin(list(self.AIC.values()))]
+
+    def best_model(self, criterion="AIC"):
+        if criterion == "BIC":
+            return self[self.best_BIC_model_rank]
+        if criterion == "AIC":
+            return self[self.best_AIC_model_rank]
+        raise ValueError(f"Unknown criterion {criterion}")
+
+    def save(self, path_of_directory="./"):
+        for model in self.models:
+            model.save(path_of_directory)
+
+    def load(self, path_of_directory="./"):
+        for model in self.models:
+            model.load(path_of_directory)
+
+    @property
+    def n_samples(self):
+        return self.models[0].n_samples
+
+    @property
+    def _p(self):
+        return self[self.ranks[0]].p
+
+    def __str__(self):
+        nb_models = len(self.models)
+        delimiter = "\n" + "-" * NB_CHARACTERS_FOR_NICE_PLOT + "\n"
+        to_print = delimiter
+        to_print += f"Collection of {nb_models} PLNPCA models with \
+                    {self.dim} variables."
+        to_print += delimiter
+        to_print += f" - Ranks considered:{self.ranks}\n"
+        dict_bic = {"rank": "criterion"} | self.BIC
+        to_print += f" - BIC metric:\n{nice_string_of_dict(dict_bic)}\n"
+
+        best_rank_bic = self.best_model(criterion="BIC")._rank
+        to_print += f"   Best model (lowest BIC): {best_rank_bic}\n\n"
+        dict_aic = {"rank": "criterion"} | self.AIC
+        to_print += f" - AIC metric:\n{nice_string_of_dict(dict_aic)}\n"
+        to_print += f"   Best model(lower AIC): \
+                {self.best_model(criterion = 'AIC')._rank}\n"
+        to_print += delimiter
+        to_print += f"* Useful properties\n"
+        to_print += f"    {self.useful_properties_string}\n"
+        to_print += "* Useful methods \n"
+        to_print += f"    {self.useful_methods_string}"
+        to_print += delimiter
+        return to_print
+
+    @property
+    def useful_methods_string(self):
+        return ".show(), .best_model()"
+
+    @property
+    def useful_properties_string(self):
+        return ".BIC, .AIC, .loglikes"
+
+
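+# A minimal usage sketch of the PLNPCA collection (illustrative only;
+# `counts` stands for any n_samples x dim array of counts):
+#
+#   pca = PLNPCA(ranks=[3, 4])
+#   pca.fit(counts)
+#   best = pca.best_model("BIC")  # the _PLNPCA model with the lowest BIC
+#   scores = best.transform()     # n_samples x rank projected latent variables
+
+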
+class _PLNPCA(_PLN):
+    NAME = "PLNPCA"
+    _components: torch.Tensor
+
+    def __init__(self, rank):
+        super().__init__()
+        self._rank = rank
+
+    def init_parameters(self, do_smart_init):
+        if self.dim < self._rank:
+            warning_string = f"\nThe requested rank of approximation {self._rank} \
+                is greater than the number of variables {self.dim}. \
+                Setting rank to {self.dim}"
+            warnings.warn(warning_string)
+            self._rank = self.dim
+        super().init_parameters(do_smart_init)
+
+    @property
+    def model_path(self):
+        return f"{self.NAME}_{self._rank}_rank"
+
+    @property
+    def rank(self):
+        return self._rank
+
+    def get_max_components(self):
+        return self._rank
+
+    def print_beginning_message(self):
+        print("-" * NB_CHARACTERS_FOR_NICE_PLOT)
+        print(f"Fitting a PLNPCA model with {self._rank} components")
+
+    @property
+    def model_parameters(self):
+        return {"coef": self.coef, "components": self.components}
+
+    def smart_init_model_parameters(self):
+        super().smart_init_coef()
+        self._components = init_components(
+            self._counts, self._covariates, self._coef, self._rank
+        )
+
+    def random_init_model_parameters(self):
+        super().random_init_coef()
+        self._components = torch.randn((self.dim, self._rank)).to(DEVICE)
+
+    def random_init_latent_parameters(self):
+        self._latent_var = 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE)
+        self._latent_mean = torch.ones((self.n_samples, self._rank)).to(DEVICE)
+
+    def smart_init_latent_parameters(self):
+        self._latent_mean = (
+            init_latent_mean(
+                self._counts,
+                self._covariates,
+                self._offsets,
+                self._coef,
+                self._components,
+            )
+            .to(DEVICE)
+            .detach()
+        )
+        self._latent_var = 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE)
+        self._latent_mean.requires_grad_(True)
+        self._latent_var.requires_grad_(True)
+
+    @property
+    def list_of_parameters_needing_gradient(self):
+        return [self._components, self._coef, self._latent_mean, self._latent_var]
+
+    def compute_elbo(self):
+        return elbo_plnpca(
+            self._counts,
+            self._covariates,
+            self._offsets,
+            self._latent_mean,
+            self._latent_var,
+            self._components,
+            self._coef,
+        )
+
+    @property
+    def number_of_parameters(self):
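+        # dim * (nb_cov + rank) free parameters (coef and components), minus
+        # rank * (rank - 1) / 2 for the rotational invariance of the components.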
+        return self.dim * (self.nb_cov + self._rank) - self._rank * (self._rank - 1) / 2
+
+    @property
+    def additional_properties_string(self):
+        return ".projected_latent_variables"
+
+    @property
+    def additional_methods_string(self):
+        string = "    only for rank=2: .viz()"
+        return string
+
+    @property
+    def covariance(self):
+        if hasattr(self, "_components"):
+            cov_latent = self._latent_mean.T @ self._latent_mean
+            cov_latent += torch.diag(torch.sum(torch.square(self._latent_var), dim=0))
+            cov_latent /= self.n_samples
+            return (self._components @ cov_latent @ self._components.T).detach()
+        return None
+
+    @property
+    def description(self):
+        return f" {self.rank} principal component."
+
+    @property
+    def latent_variables(self):
+        return torch.matmul(self._latent_mean, self._components.T)
+
+    @property
+    def projected_latent_variables(self):
+        ortho_components = torch.linalg.qr(self._components, "reduced")[0]
+        return torch.mm(self.latent_variables, ortho_components).detach().cpu()
+
+    @property
+    def components(self):
+        return self.attribute_or_none("_components")
+
+    @components.setter
+    def components(self, components):
+        self._components = components
+
+    def viz(self, ax=None, colors=None):
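+        """Scatter plot of the samples in the 2D projected latent space
+        (optionally colored), with one ellipse per sample reflecting its
+        variational variance. Only available when rank == 2."""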
+        if self._rank != 2:
+            raise RuntimeError("Can't perform visualization for rank != 2.")
+        if ax is None:
+            ax = plt.gca()
+        proj_variables = self.projected_latent_variables
+        x = proj_variables[:, 0].cpu().numpy()
+        y = proj_variables[:, 1].cpu().numpy()
+        sns.scatterplot(x=x, y=y, hue=colors, ax=ax)
+        covariances = torch.diag_embed(self._latent_var**2).detach().cpu()
+        for i in range(covariances.shape[0]):
+            plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax)
+        return ax
+
+    def transform(self, project=True):
+        if project is True:
+            return self.projected_latent_variables
+        return self.latent_variables
+
+
+class ZIPLN(PLN):
+    NAME = "ZIPLN"
+
+    _pi: torch.Tensor
+    _coef_inflation: torch.Tensor
+    _dirac: torch.Tensor
+
+    @property
+    def description(self):
+        return "with full covariance model and zero-inflation."
+
+    def random_init_model_parameters(self):
+        super().random_init_model_parameters()
+        self._coef_inflation = torch.randn(self.nb_cov, self.dim)
+        self._covariance = torch.diag(torch.ones(self.dim)).to(DEVICE)
+
+    # should change the good initialization, especially for _coef_inflation
+    def smart_init_model_parameters(self):
+        super().smart_init_model_parameters()
+        self._covariance = init_sigma(
+            self._counts, self._covariates, self._offsets, self._coef
+        )
+        self._coef_inflation = torch.randn(self.nb_cov, self.dim)
+
+    def random_init_latent_parameters(self):
+        self._dirac = self._counts == 0
+        self._latent_mean = torch.randn(self.n_samples, self.dim)
+        self._latent_var = torch.randn(self.n_samples, self.dim)
+        self._pi = (
+            torch.empty(self.n_samples, self.dim).uniform_(0, 1).to(DEVICE)
+            * self._dirac
+        )
+
+    def compute_elbo(self):
+        return elbo_zi_pln(
+            self._counts,
+            self._covariates,
+            self._offsets,
+            self._latent_mean,
+            self._latent_var,
+            self._pi,
+            self._covariance,
+            self._coef,
+            self._coef_inflation,
+            self._dirac,
+        )
+
+    @property
+    def list_of_parameters_needing_gradient(self):
+        return [self._latent_mean, self._latent_var, self._coef_inflation]
+
+    def update_closed_forms(self):
+        self._coef = closed_formula_coef(self._covariates, self._latent_mean)
+        self._covariance = closed_formula_covariance(
+            self._covariates,
+            self._latent_mean,
+            self._latent_var,
+            self._coef,
+            self.n_samples,
+        )
+        self._pi = closed_formula_pi(
+            self._offsets,
+            self._latent_mean,
+            self._latent_var,
+            self._dirac,
+            self._covariates,
+            self._coef_inflation,
+        )
+
+    @property
+    def number_of_parameters(self):
+        return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2)
diff --git a/pyPLNmodels/oaks.py b/pyPLNmodels/oaks.py
new file mode 100644
index 0000000000000000000000000000000000000000..6676d572dd25bfdd053ebf209c67b2ae1867bb42
--- /dev/null
+++ b/pyPLNmodels/oaks.py
@@ -0,0 +1,48 @@
+import pkg_resources
+import pandas as pd
+
+
+def load_oaks():
+    """Oaks amplicon data set
+
+    This data set gives the abundance of 114 taxa (66 bacterial OTUs,
+    48 fungal OTUs) in 116 samples (leaves).
+
+    A 116 samples by 114 taxa offsets matrix is also given, based on the total
+    number of reads found in each sample, which depends on the technology used
+    for either bacteria (16S) or fungi (ITS1).
+
+    For each sample, 3 additional covariates (tree, dist2ground, orientation) are known.
+
+    The data is provided as a dictionary with the following keys:
+        counts          a 116 x 114 np.array of integers (counts)
+        offsets         a 116 x 114 np.array of integers (offsets)
+        tree            a 116 x 1 vector of characters for the tree status with respect to the pathogen (susceptible, intermediate or resistant)
+        dist2ground     a 116 x 1 vector encoding the distance of the sampled leaf to the ground
+        orientation     a 116 x 1 vector encoding the orientation of the branch (South-West SW or North-East NE)
+
+    Source: data from B. Jakuschkin and coauthors.
+
+    References:
+
+     Jakuschkin, B., Fievet, V., Schwaller, L. et al. Deciphering the
+     Pathobiome: Intra- and Interkingdom Interactions Involving the
+     Pathogen Erysiphe alphitoides. Microb Ecol 72, 870–880 (2016).
+     doi:10.1007/s00248-016-0777-x
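+
+    Example (a minimal sketch; assumes the package's bundled CSV files are installed):
+
+    >>> from pyPLNmodels.oaks import load_oaks
+    >>> oaks = load_oaks()
+    >>> sorted(oaks.keys())
+    ['counts', 'dist2ground', 'offsets', 'orientation', 'tree']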
+    """
+    counts_stream = pkg_resources.resource_stream(__name__, "data/oaks/counts.csv")
+    offsets_stream = pkg_resources.resource_stream(__name__, "data/oaks/offsets.csv")
+    covariates_stream = pkg_resources.resource_stream(
+        __name__, "data/oaks/covariates.csv"
+    )
+    counts = pd.read_csv(counts_stream)
+    offsets = pd.read_csv(offsets_stream)
+    covariates = pd.read_csv(covariates_stream)
+    oaks = {
+        "counts": counts.to_numpy(),
+        "offsets": offsets.to_numpy(),
+        "tree": covariates.tree.to_numpy(),
+        "dist2ground": covariates.distTOground.to_numpy(),
+        "orientation": covariates.orientation.to_numpy(),
+    }
+    return oaks
diff --git a/setup.py b/setup.py
index 19511c9442571bf75cdd3bb359c1d86c5ba8cf99..74cc89090df8d4d49d7a9b99b19d1696b04d3de1 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 from setuptools import setup, find_packages
 
-VERSION = "0.0.34"
+VERSION = "0.0.37"
 
 with open("README.md", "r") as fh:
     long_description = fh.read()
@@ -34,7 +34,7 @@ setup(
     py_modules=[
         "pyPLNmodels._utils",
         "pyPLNmodels.elbos",
-        "pyPLNmodels.VEM",
+        "pyPLNmodels.models",
         "pyPLNmodels._closed_forms",
     ],
     long_description_content_type="text/markdown",
@@ -54,4 +54,6 @@ setup(
         # that you indicate whether you support Python 2, Python 3 or both.
         "Programming Language :: Python :: 3 :: Only",
     ],
+    include_package_data=True,
+    package_data={"": ["data/oaks/*.csv"]},
 )
diff --git a/test.py b/test.py
index c052914130ad8bd060afa2c0b2d94770b1c1448c..900e42df2c9dd4070e663d898b93b219d4fa3734 100644
--- a/test.py
+++ b/test.py
@@ -1,21 +1,25 @@
-from pyPLNmodels.VEM import PLN, PLNPCA
-import torch
-import numpy as np
-import pandas as pd
+from pyPLNmodels.models import PLNPCA, _PLNPCA, PLN
+from pyPLNmodels import get_real_count_data, get_simulated_count_data
 
-if torch.cuda.is_available():
-    DEVICE = "cuda"
-else:
-    DEVICE = "cpu"
+import os
 
-Y = pd.read_csv("./example_data/real_data/oaks_counts.csv")
+os.chdir("./pyPLNmodels/")
+
+
+counts = get_real_count_data()
 covariates = None
-O = np.log(pd.read_csv("./example_data/real_data/oaks_offsets.csv"))
+offsets = None
+# counts, covariates, offsets = get_simulated_count_data(seed = 0)
+
+pca = PLNPCA([3, 4])
+
+pca.fit(counts, covariates, offsets, tol=0.1)
+print(pca)
 
-pln = PLN()
-pln.fit(Y, covariates, O)
-print(pln)
+# pln = PLN()
+# pcamodel = pca.best_model()
+# pcamodel.save()
+# model = PLNPCA([4])[4]
 
-pca = PLNPCA(ranks=[4, 5])
-pca.fit(Y, covariates, O, tol=0.1)
-print(pca.best_model())
+# model.load()
+# # pln.fit(counts, covariates, offsets, tol=0.1)
diff --git a/tests/test_args.py b/tests/test_args.py
index fe0434ece28515883da41a27d8cc6435262e0429..16c8a73d7f0c354e7691ed0b66c734518fe083ca 100644
--- a/tests/test_args.py
+++ b/tests/test_args.py
@@ -1,20 +1,51 @@
-from pyPLNmodels.VEM import PLN, PLNPCA
+import os
+
+from pyPLNmodels.models import PLN, PLNPCA, _PLNPCA
+from pyPLNmodels import get_simulated_count_data, get_real_count_data
 import pytest
-from pytest_lazyfixture import lazy_fixture
+from pytest_lazyfixture import lazy_fixture as lf
+import pandas as pd
 import numpy as np
-from tests.utils import get_simulated_data, get_real_data, MSE
 
-Y_sim, covariates_sim, O_sim, true_Sigma, true_beta = get_simulated_data()
+(
+    counts_sim,
+    covariates_sim,
+    offsets_sim,
+) = get_simulated_count_data(nb_cov=2)
 
-RANKS = [4, 8]
-print("ca marche")
+counts_real = get_real_count_data(n_samples=298, dim=101)
+RANKS = [2, 8]
 
 
 @pytest.fixture
-def my_instance_plnpca():
+def instance_plnpca():
     plnpca = PLNPCA(ranks=RANKS)
     return plnpca
 
 
-def test_pandas_init(my_instance_plnpca):
-    my_instance_plnpca.fit(Y_sim, covariates_sim, O_sim)
+@pytest.fixture
+def instance__plnpca():
+    model = _PLNPCA(rank=RANKS[0])
+    return model
+
+
+@pytest.fixture
+def instance_pln_full():
+    return PLN()
+
+
+all_instances = [lf("instance_plnpca"), lf("instance__plnpca"), lf("instance_pln_full")]
+
+
+@pytest.mark.parametrize("instance", all_instances)
+def test_pandas_init(instance):
+    instance.fit(
+        pd.DataFrame(counts_sim.numpy()),
+        pd.DataFrame(covariates_sim.numpy()),
+        pd.DataFrame(offsets_sim.numpy()),
+    )
+
+
+@pytest.mark.parametrize("instance", all_instances)
+def test_numpy_init(instance):
+    instance.fit(counts_sim.numpy(), covariates_sim.numpy(), offsets_sim.numpy())
diff --git a/tests/test_common.py b/tests/test_common.py
index f9c8e22c60dccd5746d782cd0650b4bb2db2d61d..7e0e6c955a074e8169b643f014bca59583e32d01 100644
--- a/tests/test_common.py
+++ b/tests/test_common.py
@@ -1,137 +1,336 @@
 import torch
 import numpy as np
-from pyPLNmodels.VEM import PLN, _PLNPCA
-from tests.utils import get_simulated_data, get_real_data, MSE
+import pandas as pd
+
+from pyPLNmodels.models import PLN, _PLNPCA
+from pyPLNmodels import get_simulated_count_data, get_real_count_data
+from tests.utils import MSE
 
 import pytest
 from pytest_lazyfixture import lazy_fixture as lf
+import os
 
-Y_sim, covariates_sim, O_sim, true_Sigma, true_beta = get_simulated_data()
+(
+    counts_sim,
+    covariates_sim,
+    offsets_sim,
+    true_covariance,
+    true_coef,
+) = get_simulated_count_data(return_true_param=True, nb_cov=2)
 
 
-Y_real, covariates_real, O_real = get_real_data()
-O_real = np.log(O_real)
+counts_real = get_real_count_data()
 rank = 8
 
 
 @pytest.fixture
-def my_instance_pln():
-    pln = PLN()
-    return pln
+def instance_pln_full():
+    pln_full = PLN()
+    return pln_full
 
 
 @pytest.fixture
-def my_instance__plnpca():
+def instance__plnpca():
     plnpca = _PLNPCA(rank=rank)
     return plnpca
 
 
 @pytest.fixture
-def my_simulated_fitted_pln():
-    pln = PLN()
-    pln.fit(Y_sim, covariates_sim, O_sim)
-    return pln
+def simulated_fitted_pln_full():
+    pln_full = PLN()
+    pln_full.fit(counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim)
+    return pln_full
 
 
 @pytest.fixture
-def my_real_fitted_pln():
-    pln = PLN()
-    pln.fit(Y_real, covariates_real, O_real)
-    return pln
+def simulated_fitted__plnpca():
+    plnpca = _PLNPCA(rank=rank)
+    plnpca.fit(counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim)
+    return plnpca
 
 
 @pytest.fixture
-def my_real_fitted__plnpca():
-    plnpca = _PLNPCA(rank=rank)
-    plnpca.fit(Y_real, covariates_real, O_real)
-    return plnpca
+def loaded_simulated_pln_full(simulated_fitted_pln_full):
+    simulated_fitted_pln_full.save()
+    loaded_pln_full = PLN()
+    loaded_pln_full.load()
+    return loaded_pln_full
 
 
 @pytest.fixture
-def my_simulated_fitted__plnpca():
-    plnpca = _PLNPCA(rank=rank)
-    plnpca.fit(Y_sim, covariates_sim, O_sim)
-    return plnpca
+def loaded_refit_simulated_pln_full(loaded_simulated_pln_full):
+    loaded_simulated_pln_full.fit(
+        counts=counts_sim,
+        covariates=covariates_sim,
+        offsets=offsets_sim,
+        keep_going=True,
+    )
+    return loaded_simulated_pln_full
 
 
 @pytest.fixture
-def my_simulated_fitted__plnpca():
-    plnpca = _PLNPCA(rank=rank)
-    plnpca.fit(Y_sim, covariates_sim, O_sim)
-    return plnpca
+def loaded_simulated__plnpca(simulated_fitted__plnpca):
+    simulated_fitted__plnpca.save()
+    loaded_plnpca = _PLNPCA(rank=rank)
+    loaded_plnpca.load()
+    return loaded_plnpca
 
 
-@pytest.mark.parametrize(
-    "simulated_fitted_any_pln",
-    [lf("my_simulated_fitted_pln"), lf("my_simulated_fitted__plnpca")],
-)
-def test_find_right_Sigma(simulated_fitted_any_pln):
-    mse_Sigma = MSE(simulated_fitted_any_pln.Sigma - true_Sigma)
-    assert mse_Sigma < 0.01
+@pytest.fixture
+def loaded_refit_simulated__plnpca(loaded_simulated__plnpca):
+    loaded_simulated__plnpca.fit(
+        counts=counts_sim,
+        covariates=covariates_sim,
+        offsets=offsets_sim,
+        keep_going=True,
+    )
+    return loaded_simulated__plnpca
 
 
-@pytest.mark.parametrize(
-    "pln", [lf("my_simulated_fitted_pln"), lf("my_simulated_fitted__plnpca")]
-)
-def test_find_right_beta(pln):
-    mse_beta = MSE(pln.beta - true_beta)
-    assert mse_beta < 0.1
+@pytest.fixture
+def real_fitted_pln_full():
+    pln_full = PLN()
+    pln_full.fit(counts=counts_real)
+    return pln_full
 
 
-def test_number_of_iterations(my_simulated_fitted_pln):
-    nb_iterations = len(my_simulated_fitted_pln.elbos_list)
-    assert 40 < nb_iterations < 60
+@pytest.fixture
+def loaded_real_pln_full(real_fitted_pln_full):
+    real_fitted_pln_full.save()
+    loaded_pln_full = PLN()
+    loaded_pln_full.load()
+    return loaded_pln_full
 
 
-@pytest.mark.parametrize(
-    "any_pln",
-    [
-        lf("my_simulated_fitted_pln"),
-        lf("my_simulated_fitted__plnpca"),
-        lf("my_real_fitted_pln"),
-        lf("my_real_fitted__plnpca"),
-    ],
-)
-def test_show(any_pln):
-    any_pln.show()
+@pytest.fixture
+def loaded_refit_real_pln_full(loaded_real_pln_full):
+    loaded_real_pln_full.fit(counts=counts_real, keep_going=True)
+    return loaded_real_pln_full
 
 
-@pytest.mark.parametrize(
-    "any_pln",
-    [
-        lf("my_simulated_fitted_pln"),
-        lf("my_simulated_fitted__plnpca"),
-        lf("my_real_fitted_pln"),
-        lf("my_real_fitted__plnpca"),
-    ],
-)
+@pytest.fixture
+def real_fitted__plnpca():
+    plnpca = _PLNPCA(rank=rank)
+    plnpca.fit(counts=counts_real)
+    return plnpca
+
+
+@pytest.fixture
+def loaded_real__plnpca(real_fitted__plnpca):
+    real_fitted__plnpca.save()
+    loaded_plnpca = _PLNPCA(rank=rank)
+    loaded_plnpca.load()
+    return loaded_plnpca
+
+
+@pytest.fixture
+def loaded_refit_real__plnpca(loaded_real__plnpca):
+    loaded_real__plnpca.fit(counts=counts_real, keep_going=True)
+    return loaded_real__plnpca
+
+
+real_pln_full = [
+    lf("real_fitted_pln_full"),
+    lf("loaded_real_pln_full"),
+    lf("loaded_refit_real_pln_full"),
+]
+real__plnpca = [
+    lf("real_fitted__plnpca"),
+    lf("loaded_real__plnpca"),
+    lf("loaded_refit_real__plnpca"),
+]
+simulated_pln_full = [
+    lf("simulated_fitted_pln_full"),
+    lf("loaded_simulated_pln_full"),
+    lf("loaded_refit_simulated_pln_full"),
+]
+simulated__plnpca = [
+    lf("simulated_fitted__plnpca"),
+    lf("loaded_simulated__plnpca"),
+    lf("loaded_refit_simulated__plnpca"),
+]
+
+loaded_sim_pln = [
+    lf("loaded_simulated__plnpca"),
+    lf("loaded_simulated_pln_full"),
+    lf("loaded_refit_simulated_pln_full"),
+    lf("loaded_refit_simulated_pln_full"),
+]
+
+
+@pytest.mark.parametrize("loaded", loaded_sim_pln)
+def test_refit_not_keep_going(loaded):
+    loaded.fit(
+        counts=counts_sim,
+        covariates=covariates_sim,
+        offsets=offsets_sim,
+        keep_going=False,
+    )
+
+
+all_instances = [lf("instance__plnpca"), lf("instance_pln_full")]
+
+all_fitted__plnpca = simulated__plnpca + real__plnpca
+all_fitted_pln_full = simulated_pln_full + real_pln_full
+
+simulated_any_pln = simulated__plnpca + simulated_pln_full
+real_any_pln = real_pln_full + real__plnpca
+all_fitted_models = simulated_any_pln + real_any_pln
+
+
+@pytest.mark.parametrize("any_pln", all_fitted_models)
+def test_properties(any_pln):
+    assert hasattr(any_pln, "latent_variables")
+    assert hasattr(any_pln, "model_parameters")
+    assert hasattr(any_pln, "latent_parameters")
+    assert hasattr(any_pln, "optim_parameters")
+
+
+@pytest.mark.parametrize("any_pln", all_fitted_models)
+def test_show_coef_transform_covariance_pcaprojected(any_pln):
+    any_pln.show()
+    any_pln.plotargs.show_loss()
+    any_pln.plotargs.show_stopping_criterion()
+    assert hasattr(any_pln, "coef")
+    assert callable(any_pln.transform)
+    assert hasattr(any_pln, "covariance")
+    assert callable(any_pln.pca_projected_latent_variables)
+    assert any_pln.pca_projected_latent_variables(n_components=None) is not None
+    with pytest.raises(Exception):
+        any_pln.pca_projected_latent_variables(n_components=any_pln.dim + 1)
+
+
+@pytest.mark.parametrize("sim_pln", simulated_any_pln)
+def test_predict_simulated(sim_pln):
+    X = torch.randn((sim_pln.n_samples, sim_pln.nb_cov - 1))
+    prediction = sim_pln.predict(X)
+    expected = (
+        torch.stack((torch.ones(sim_pln.n_samples, 1), X), axis=1).squeeze()
+        @ sim_pln.coef
+    )
+    assert torch.all(torch.eq(expected, prediction))
+
+
+@pytest.mark.parametrize("real_pln", real_any_pln)
+def test_predict_real(real_pln):
+    prediction = real_pln.predict()
+    expected = torch.ones(real_pln.n_samples, 1) @ real_pln.coef
+    assert torch.all(torch.eq(expected, prediction))
+
+
+@pytest.mark.parametrize("any_pln", all_fitted_models)
 def test_print(any_pln):
     print(any_pln)
 
 
-@pytest.mark.parametrize(
-    "any_instance_pln", [lf("my_instance__plnpca"), lf("my_instance_pln")]
-)
+@pytest.mark.parametrize("any_instance_pln", all_instances)
 def test_verbose(any_instance_pln):
-    any_instance_pln.fit(Y_sim, covariates_sim, O_sim, verbose=True)
+    any_instance_pln.fit(
+        counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim, verbose=True
+    )
+
+
+@pytest.mark.parametrize("sim_pln", simulated_any_pln)
+def test_only_counts(sim_pln):
+    sim_pln.fit(counts=counts_sim)
+
+
+@pytest.mark.parametrize("sim_pln", simulated_any_pln)
+def test_only_counts_and_offsets(sim_pln):
+    sim_pln.fit(counts=counts_sim, offsets=offsets_sim)
+
+
+@pytest.mark.parametrize("sim_pln", simulated_any_pln)
+def test_only_Y_and_cov(sim_pln):
+    sim_pln.fit(counts=counts_sim, covariates=covariates_sim)
+
+
+@pytest.mark.parametrize("simulated_fitted_any_pln", simulated_any_pln)
+def test_find_right_covariance(simulated_fitted_any_pln):
+    mse_covariance = MSE(simulated_fitted_any_pln.covariance - true_covariance)
+    assert mse_covariance < 0.05
+
+
+@pytest.mark.parametrize("sim_pln", simulated_any_pln)
+def test_find_right_coef(sim_pln):
+    mse_coef = MSE(sim_pln.coef - true_coef)
+    assert mse_coef < 0.1
+
+
+def test_number_of_iterations_pln_full(simulated_fitted_pln_full):
+    nb_iterations = len(simulated_fitted_pln_full.elbos_list)
+    assert 50 < nb_iterations < 300
+
+
+def test_computable_elbopca(instance__plnpca, simulated_fitted__plnpca):
+    instance__plnpca.counts = simulated_fitted__plnpca.counts
+    instance__plnpca.covariates = simulated_fitted__plnpca.covariates
+    instance__plnpca.offsets = simulated_fitted__plnpca.offsets
+    instance__plnpca.latent_mean = simulated_fitted__plnpca.latent_mean
+    instance__plnpca.latent_var = simulated_fitted__plnpca.latent_var
+    instance__plnpca.components = simulated_fitted__plnpca.components
+    instance__plnpca.coef = simulated_fitted__plnpca.coef
+    instance__plnpca.compute_elbo()
+
+
+def test_computable_elbo_full(instance_pln_full, simulated_fitted_pln_full):
+    instance_pln_full.counts = simulated_fitted_pln_full.counts
+    instance_pln_full.covariates = simulated_fitted_pln_full.covariates
+    instance_pln_full.offsets = simulated_fitted_pln_full.offsets
+    instance_pln_full.latent_mean = simulated_fitted_pln_full.latent_mean
+    instance_pln_full.latent_var = simulated_fitted_pln_full.latent_var
+    instance_pln_full.covariance = simulated_fitted_pln_full.covariance
+    instance_pln_full.coef = simulated_fitted_pln_full.coef
+    instance_pln_full.compute_elbo()
+
+
+def test_fail_count_setter(simulated_fitted_pln_full):
+    wrong_counts = torch.randint(size=(10, 5), low=0, high=10)
+    with pytest.raises(Exception):
+        simulated_fitted_pln_full.counts = wrong_counts
+
+
+@pytest.mark.parametrize("any_pln", all_fitted_models)
+def test_setter_with_numpy(any_pln):
+    np_counts = any_pln.counts.numpy()
+    any_pln.counts = np_counts
+
+
+@pytest.mark.parametrize("any_pln", all_fitted_models)
+def test_setter_with_pandas(any_pln):
+    pd_counts = pd.DataFrame(any_pln.counts.numpy())
+    any_pln.counts = pd_counts
+
+
+@pytest.mark.parametrize("instance", all_instances)
+def test_random_init(instance):
+    instance.fit(counts_sim, covariates_sim, offsets_sim, do_smart_init=False)
+
+
+@pytest.mark.parametrize("instance", all_instances)
+def test_print_end_of_fitting_message(instance):
+    instance.fit(counts_sim, covariates_sim, offsets_sim, nb_max_iteration=4)
+
+
+@pytest.mark.parametrize("any_pln", all_fitted_models)
+def test_fail_wrong_covariates_prediction(any_pln):
+    X = torch.randn(any_pln.n_samples, any_pln.nb_cov)
+    with pytest.raises(Exception):
+        any_pln.predict(X)
 
 
-@pytest.mark.parametrize(
-    "any_pln", [lf("my_simulated_fitted_pln"), lf("my_simulated_fitted__plnpca")]
-)
-def test_only_Y(any_pln):
-    any_pln.fit(Y_sim)
+@pytest.mark.parametrize("any__plnpca", all_fitted__plnpca)
+def test_latent_var_pca(any__plnpca):
+    assert any__plnpca.transform(project=False).shape == any__plnpca.counts.shape
+    assert any__plnpca.transform().shape == (any__plnpca.n_samples, any__plnpca.rank)
 
 
-@pytest.mark.parametrize(
-    "any_pln", [lf("my_simulated_fitted_pln"), lf("my_simulated_fitted__plnpca")]
-)
-def test_only_Y_and_O(any_pln):
-    any_pln.fit(Y_sim, O_sim)
+@pytest.mark.parametrize("any_pln_full", all_fitted_pln_full)
+def test_latent_var_pln_full(any_pln_full):
+    assert any_pln_full.transform().shape == any_pln_full.counts.shape
 
 
-@pytest.mark.parametrize(
-    "any_pln", [lf("my_simulated_fitted_pln"), lf("my_simulated_fitted__plnpca")]
-)
-def test_only_Y_and_cov(any_pln):
-    any_pln.fit(Y_sim, covariates_sim)
+def test_wrong_rank():
+    instance = _PLNPCA(counts_sim.shape[1] + 1)
+    with pytest.warns(UserWarning):
+        instance.fit(counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim)
diff --git a/tests/test_plnpca.py b/tests/test_plnpca.py
index 743a9019ae6d833fb78db0600377cda2dfa70252..db0e324aac2d70ff3b98a517aca8fec9f01b0f4f 100644
--- a/tests/test_plnpca.py
+++ b/tests/test_plnpca.py
@@ -1,12 +1,24 @@
+import os
+
 import pytest
-from pytest_lazyfixture import lazy_fixture
+from pytest_lazyfixture import lazy_fixture as lf
+from pyPLNmodels.models import PLNPCA, _PLNPCA
+from pyPLNmodels import get_simulated_count_data, get_real_count_data
+from tests.utils import MSE
 
-from pyPLNmodels.VEM import PLN, PLNPCA
-from tests.utils import get_simulated_data, MSE
+import matplotlib.pyplot as plt
+import numpy as np
 
-RANKS = [2, 4]
+(
+    counts_sim,
+    covariates_sim,
+    offsets_sim,
+    true_covariance,
+    true_coef,
+) = get_simulated_count_data(return_true_param=True)
 
-Y_sim, covariates_sim, O_sim, true_Sigma, true_beta = get_simulated_data()
+counts_real = get_real_count_data()
+RANKS = [2, 8]
 
 
 @pytest.fixture
@@ -16,25 +28,131 @@ def my_instance_plnpca():
 
 
 @pytest.fixture
-def simulated_fitted_plnpca():
-    plnpca = PLNPCA(RANKS)
-    plnpca.fit(Y_sim, covariates_sim, O_sim)
-    return plnpca
+def real_fitted_plnpca(my_instance_plnpca):
+    my_instance_plnpca.fit(counts_real)
+    return my_instance_plnpca
 
 
-def test_find_right_Sigma(simulated_fitted_plnpca):
-    passed = True
-    for model in simulated_fitted_plnpca.models:
-        mse_Sigma = MSE(model.Sigma - true_Sigma)
-        if mse_Sigma > 0.1:
-            passed = False
-    assert passed
+@pytest.fixture
+def simulated_fitted_plnpca(my_instance_plnpca):
+    my_instance_plnpca.fit(
+        counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim
+    )
+    return my_instance_plnpca
+
+
+@pytest.fixture
+def one_simulated_fitted_plnpca():
+    model = PLNPCA(ranks=2)
+    model.fit(counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim)
+    return model
+
+
+@pytest.fixture
+def real_best_aic(real_fitted_plnpca):
+    return real_fitted_plnpca.best_model("AIC")
+
+
+@pytest.fixture
+def real_best_bic(real_fitted_plnpca):
+    return real_fitted_plnpca.best_model("BIC")
+
+
+@pytest.fixture
+def simulated_best_aic(simulated_fitted_plnpca):
+    return simulated_fitted_plnpca.best_model("AIC")
+
+
+@pytest.fixture
+def simulated_best_bic(simulated_fitted_plnpca):
+    return simulated_fitted_plnpca.best_model("BIC")
+
+
+simulated_best_models = [lf("simulated_best_aic"), lf("simulated_best_bic")]
+real_best_models = [lf("real_best_aic"), lf("real_best_bic")]
+best_models = simulated_best_models + real_best_models
+
 
+all_fitted_simulated_plnpca = [
+    lf("simulated_fitted_plnpca"),
+    lf("one_simulated_fitted_plnpca"),
+]
+all_fitted_plnpca = [lf("real_fitted_plnpca")] + all_fitted_simulated_plnpca
 
-def test_find_right_beta(simulated_fitted_plnpca):
+
+def test_print_plnpca(simulated_fitted_plnpca):
+    print(simulated_fitted_plnpca)
+
+
+@pytest.mark.parametrize("best_model", best_models)
+def test_best_model(best_model):
+    print(best_model)
+
+
+@pytest.mark.parametrize("best_model", best_models)
+def test_projected_variables(best_model):
+    plv = best_model.projected_latent_variables
+    assert plv.shape[0] == best_model.n_samples and plv.shape[1] == best_model.rank
+
+
+def test_save_load_back_and_refit(simulated_fitted_plnpca):
+    simulated_fitted_plnpca.save()
+    new = PLNPCA(ranks=RANKS)
+    new.load()
+    new.fit(counts=counts_sim, covariates=covariates_sim, offsets=offsets_sim)
+
+
+@pytest.mark.parametrize("plnpca", all_fitted_simulated_plnpca)
+def test_find_right_covariance(plnpca):
-    passed = True
-    for model in simulated_fitted_plnpca.models:
-        mse_beta = MSE(model.beta - true_beta)
-        if mse_beta > 0.1:
-            passed = False
-    assert passed
+    for model in plnpca.models:
+        mse_covariance = MSE(model.covariance - true_covariance)
+        assert mse_covariance < 0.3
+
+
+@pytest.mark.parametrize("plnpca", all_fitted_simulated_plnpca)
+def test_find_right_coef(plnpca):
+    for model in plnpca.models:
+        mse_coef = MSE(model.coef - true_coef)
+        assert mse_coef < 0.3
+
+
+@pytest.mark.parametrize("all_pca", all_fitted_plnpca)
+def test_additional_methods_pca(all_pca):
+    all_pca.show()
+    all_pca.BIC
+    all_pca.AIC
+    all_pca.loglikes
+
+
+@pytest.mark.parametrize("all_pca", all_fitted_plnpca)
+def test_viz_pca(all_pca):
+    _, ax = plt.subplots()
+    all_pca[2].viz(ax=ax)
+    plt.show()
+    all_pca[2].viz()
+    plt.show()
+    n_samples = all_pca.n_samples
+    colors = np.random.randint(low=0, high=2, size=n_samples)
+    all_pca[2].viz(colors=colors)
+    plt.show()
+
+
+@pytest.mark.parametrize(
+    "pca", [lf("real_fitted_plnpca"), lf("simulated_fitted_plnpca")]
+)
+def test_fails_viz_pca(pca):
+    with pytest.raises(RuntimeError):
+        pca[8].viz()
+
+
+@pytest.mark.parametrize("all_pca", all_fitted_plnpca)
+def test_closest(all_pca):
+    with pytest.warns(UserWarning):
+        all_pca[9]
+
+
+@pytest.mark.parametrize("plnpca", all_fitted_plnpca)
+def test_wrong_criterion(plnpca):
+    with pytest.raises(ValueError):
+        plnpca.best_model("AIK")
diff --git a/tests/utils.py b/tests/utils.py
index ea97306f8d848d42a4dd332145a3945a2fe1e54a..0cc7f2d7b9600b724015896ed97813699e153239 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,59 +1,4 @@
-import pandas as pd
 import torch
-from sklearn.preprocessing import LabelEncoder
-import scanpy
-import numpy as np
-import os
-
-
-def get_simulated_data():
-    Y = pd.read_csv("../example_data/test_data/Y_test.csv")
-    covariates = pd.read_csv("../example_data/test_data/cov_test.csv")
-    O = pd.read_csv("../example_data/test_data/O_test.csv")
-    true_Sigma = torch.from_numpy(
-        pd.read_csv(
-            "../example_data/test_data/true_parameters/true_Sigma_test.csv"
-        ).values
-    )
-    true_beta = torch.from_numpy(
-        pd.read_csv(
-            "../example_data/test_data/true_parameters/true_beta_test.csv"
-        ).values
-    )
-    return Y, covariates, O, true_Sigma, true_beta
-
-
-def get_real_data(take_oaks=True, max_class=5, max_n=500, max_dim=20):
-    if take_oaks is True:
-        Y = pd.read_csv("../example_data/real_data/oaks_counts.csv")
-        n, p = Y.shape
-        covariates = None
-        O = pd.read_csv("../example_data/real_data/oaks_offsets.csv")
-        return Y, covariates, O
-    else:
-        data = scanpy.read_h5ad(
-            "example_data/real_data/2k_cell_per_study_10studies.h5ad"
-        )
-        Y = data.X.toarray()[:max_n]
-        GT_name = data.obs["standard_true_celltype_v5"][:max_n]
-        le = LabelEncoder()
-        GT = le.fit_transform(GT_name)
-        filter = GT < max_class
-        unique, index = np.unique(GT, return_counts=True)
-        enough_elem = index > 15
-        classes_with_enough_elem = unique[enough_elem]
-        filter_bis = np.isin(GT, classes_with_enough_elem)
-        mask = filter * filter_bis
-        GT = GT[mask]
-        GT_name = GT_name[mask]
-        Y = Y[mask]
-        GT = le.fit_transform(GT)
-        not_only_zeros = np.sum(Y, axis=0) > 0
-        Y = Y[:, not_only_zeros]
-        var = np.var(Y, axis=0)
-        most_variables = np.argsort(var)[-max_dim:]
-        Y = Y[:, most_variables]
-        return Y, GT, list(GT_name.values.__array__())
 
 
 def MSE(t):