---
tags:
- ColBERT
- PyLate
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:640000
- loss:Distillation
base_model: Alibaba-NLP/gte-modernbert-base
pipeline_tag: sentence-similarity
library_name: PyLate
metrics:
- MaxSim_accuracy@1
- MaxSim_accuracy@3
- MaxSim_accuracy@5
- MaxSim_accuracy@10
- MaxSim_precision@1
- MaxSim_precision@3
- MaxSim_precision@5
- MaxSim_precision@10
- MaxSim_recall@1
- MaxSim_recall@3
- MaxSim_recall@5
- MaxSim_recall@10
- MaxSim_ndcg@10
- MaxSim_mrr@10
- MaxSim_map@100
model-index:
- name: PyLate model based on Alibaba-NLP/gte-modernbert-base
results:
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoClimateFEVER
type: NanoClimateFEVER
metrics:
- type: MaxSim_accuracy@1
value: 0.36
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.62
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.78
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.86
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.36
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.2333333333333333
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.20799999999999996
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.12799999999999997
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.18333333333333332
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.289
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.41566666666666663
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.49566666666666664
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.41477895139843374
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.526579365079365
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.33473812643311207
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoDBPedia
type: NanoDBPedia
metrics:
- type: MaxSim_accuracy@1
value: 0.88
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.94
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.96
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.98
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.88
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.7133333333333334
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.6560000000000001
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.572
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.11798996781634019
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.23074158968531658
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.2961618059276896
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.4145532152487909
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.7295518860528665
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.9168571428571428
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.5883869727264871
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoFEVER
type: NanoFEVER
metrics:
- type: MaxSim_accuracy@1
value: 0.92
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.98
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.98
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 1.0
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.92
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.35999999999999993
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.21599999999999994
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.10999999999999999
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.8566666666666667
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.96
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.96
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.98
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.9451911044041129
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.9522222222222223
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.9270501207729468
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoFiQA2018
type: NanoFiQA2018
metrics:
- type: MaxSim_accuracy@1
value: 0.56
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.66
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.74
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.8
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.56
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.32666666666666666
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.25599999999999995
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.15199999999999997
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.30924603174603177
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.47840476190476194
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.5751746031746031
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.6411984126984127
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.5669909336903424
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.6359444444444444
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.5031998196513616
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoHotpotQA
type: NanoHotpotQA
metrics:
- type: MaxSim_accuracy@1
value: 0.92
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 1.0
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 1.0
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 1.0
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.92
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.58
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.35999999999999993
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.18599999999999994
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.46
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.87
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.9
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.93
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.9011747095216048
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.96
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.8591508921772081
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoMSMARCO
type: NanoMSMARCO
metrics:
- type: MaxSim_accuracy@1
value: 0.54
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.68
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.74
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.92
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.54
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.22666666666666666
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.14800000000000002
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.092
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.54
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.68
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.74
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.92
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.7088869908160952
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.6446507936507936
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.6496349206349206
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoNFCorpus
type: NanoNFCorpus
metrics:
- type: MaxSim_accuracy@1
value: 0.56
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.68
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.74
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.76
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.56
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.43333333333333335
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.39199999999999996
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.304
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.06640185752724687
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.10198877096622012
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.12839743828750172
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.15658989769166
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.3957047406068243
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.627
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.1917924344366858
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoNQ
type: NanoNQ
metrics:
- type: MaxSim_accuracy@1
value: 0.64
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.82
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.86
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.9
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.64
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.2866666666666666
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.17999999999999997
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.1
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.61
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.78
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.82
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.88
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.7645227466201794
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.7390000000000001
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.7239323294755705
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoQuoraRetrieval
type: NanoQuoraRetrieval
metrics:
- type: MaxSim_accuracy@1
value: 0.96
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 1.0
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 1.0
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 1.0
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.96
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.4
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.25599999999999995
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.13399999999999998
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.8473333333333334
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.9453333333333334
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.9693333333333334
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.9893333333333334
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.9691448095973965
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.9766666666666667
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.9551871794871795
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoSCIDOCS
type: NanoSCIDOCS
metrics:
- type: MaxSim_accuracy@1
value: 0.48
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.74
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.78
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.84
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.48
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.3999999999999999
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.292
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.19399999999999995
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.10066666666666667
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.24666666666666665
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.29966666666666664
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.39666666666666667
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.3986767701602276
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.6137222222222222
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.3163385555719993
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoArguAna
type: NanoArguAna
metrics:
- type: MaxSim_accuracy@1
value: 0.3
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.62
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.7
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.82
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.3
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.20666666666666667
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.14
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.08199999999999999
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.3
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.62
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.7
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.82
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.5609089627577635
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.4774603174603175
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.4824361431413148
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoSciFact
type: NanoSciFact
metrics:
- type: MaxSim_accuracy@1
value: 0.74
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.86
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.9
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.94
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.74
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.3
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.19599999999999995
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.10399999999999998
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.715
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.83
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.885
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.93
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.8371556505161787
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.8116666666666668
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.8048798701298702
name: Maxsim Map@100
- task:
type: py-late-information-retrieval
name: Py Late Information Retrieval
dataset:
name: NanoTouche2020
type: NanoTouche2020
metrics:
- type: MaxSim_accuracy@1
value: 0.7755102040816326
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.9387755102040817
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.9795918367346939
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.9795918367346939
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.7755102040816326
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.6598639455782314
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.6571428571428573
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.5183673469387755
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.05176652252904378
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.13618168510556633
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.2193408037582337
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.33397423594107617
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.5926586898856947
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.8629251700680272
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.42574993112112997
name: Maxsim Map@100
- task:
type: nano-beir
name: Nano BEIR
dataset:
name: NanoBEIR mean
type: NanoBEIR_mean
metrics:
- type: MaxSim_accuracy@1
value: 0.6642700156985872
name: Maxsim Accuracy@1
- type: MaxSim_accuracy@3
value: 0.8106750392464678
name: Maxsim Accuracy@3
- type: MaxSim_accuracy@5
value: 0.8584301412872841
name: Maxsim Accuracy@5
- type: MaxSim_accuracy@10
value: 0.9076609105180532
name: Maxsim Accuracy@10
- type: MaxSim_precision@1
value: 0.6642700156985872
name: Maxsim Precision@1
- type: MaxSim_precision@3
value: 0.39434850863422294
name: Maxsim Precision@3
- type: MaxSim_precision@5
value: 0.3043956043956044
name: Maxsim Precision@5
- type: MaxSim_precision@10
value: 0.20587441130298273
name: Maxsim Precision@10
- type: MaxSim_recall@1
value: 0.3968003368937432
name: Maxsim Recall@1
- type: MaxSim_recall@3
value: 0.5514089852047589
name: Maxsim Recall@3
- type: MaxSim_recall@5
value: 0.6083647167549766
name: Maxsim Recall@5
- type: MaxSim_recall@10
value: 0.6836909560189698
name: Maxsim Recall@10
- type: MaxSim_ndcg@10
value: 0.6757959189252093
name: Maxsim Ndcg@10
- type: MaxSim_mrr@10
value: 0.7495919239490669
name: Maxsim Mrr@10
- type: MaxSim_map@100
value: 0.5971136381353681
name: Maxsim Map@100
---
# PyLate model based on Alibaba-NLP/gte-modernbert-base
This is a [PyLate](https://github.com/lightonai/pylate) model trained on the [ms-marco-en-bge-gemma](https://huggingface.co/datasets/lightonai/ms-marco-en-bge-gemma) dataset. It maps sentences & paragraphs to sequences of 128-dimensional dense vectors and can be used for semantic textual similarity using the MaxSim operator.
## Model Details
### Model Description
- **Model Type:** PyLate model
- **Base model:** [Alibaba-NLP/gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base)
- **Document Length:** 300 tokens
- **Query Length:** 32 tokens
- **Output Dimensionality:** 128 dimensions
- **Similarity Function:** MaxSim
- **Training Dataset:**
- [ms-marco-en-bge-gemma](https://huggingface.co/datasets/lightonai/ms-marco-en-bge-gemma)
- **Language:** English
- **License:** Apache 2.0
### Model Sources
- **Documentation:** [PyLate Documentation](https://lightonai.github.io/pylate/)
- **Repository:** [PyLate on GitHub](https://github.com/lightonai/pylate)
- **Hugging Face:** [PyLate models on Hugging Face](https://huggingface.co/models?library=PyLate)
### Full Model Architecture
```
ColBERT(
(0): Transformer({'max_seq_length': 299, 'do_lower_case': False}) with Transformer model: ModernBertModel
(1): Dense({'in_features': 768, 'out_features': 128, 'bias': False, 'activation_function': 'torch.nn.modules.linear.Identity'})
)
```
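For intuition, the MaxSim operator scores a query-document pair by taking, for each query token embedding, its best match over the document token embeddings and summing those maxima over the query. A minimal sketch of that scoring with placeholder tensors (shapes mirror the 32-token query / 300-token document limits and the 128-dimensional output above; real embeddings come from `model.encode`):
```python
import torch

# Placeholder, L2-normalized token embeddings; in practice these come from model.encode(...).
query_embeddings = torch.nn.functional.normalize(torch.randn(32, 128), dim=-1)      # (query_tokens, dim)
document_embeddings = torch.nn.functional.normalize(torch.randn(300, 128), dim=-1)  # (doc_tokens, dim)

# Token-level similarity matrix between every query token and every document token.
token_similarities = query_embeddings @ document_embeddings.T  # (32, 300)

# MaxSim: keep the best-matching document token per query token, then sum over the query.
maxsim_score = token_similarities.max(dim=1).values.sum()
print(maxsim_score.item())
```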
## Usage
First install the PyLate library:
```bash
pip install -U pylate
```
### Retrieval
PyLate provides a streamlined interface to index and retrieve documents using ColBERT models. The index leverages the Voyager HNSW index to efficiently handle document embeddings and enable fast retrieval.
#### Indexing documents
First, load the ColBERT model and initialize the Voyager index, then encode and index your documents:
```python
from pylate import indexes, models, retrieve
# Step 1: Load the ColBERT model
model = models.ColBERT(
model_name_or_path=pylate_model_id,
)
# Step 2: Initialize the Voyager index
index = indexes.Voyager(
index_folder="pylate-index",
index_name="index",
override=True, # This overwrites the existing index if any
)
# Step 3: Encode the documents
documents_ids = ["1", "2", "3"]
documents = ["document 1 text", "document 2 text", "document 3 text"]
documents_embeddings = model.encode(
documents,
batch_size=32,
is_query=False, # Ensure that it is set to False to indicate that these are documents, not queries
show_progress_bar=True,
)
# Step 4: Add document embeddings to the index by providing embeddings and corresponding ids
index.add_documents(
documents_ids=documents_ids,
documents_embeddings=documents_embeddings,
)
```
Note that you do not have to recreate the index and encode the documents every time. Once you have created an index and added the documents, you can re-use the index later by loading it:
```python
# To load an index, simply instantiate it with the correct folder/name and without overriding it
index = indexes.Voyager(
index_folder="pylate-index",
index_name="index",
)
```
#### Retrieving top-k documents for queries
Once the documents are indexed, you can retrieve the top-k most relevant documents for a given set of queries.
To do so, initialize the ColBERT retriever with the index you want to search in, encode the queries, and then retrieve the top-k documents to get the ids and relevance scores of the top matches:
```python
# Step 1: Initialize the ColBERT retriever
retriever = retrieve.ColBERT(index=index)
# Step 2: Encode the queries
queries_embeddings = model.encode(
["query for document 3", "query for document 1"],
batch_size=32,
is_query=True, # Ensure that it is set to True to indicate that these are queries
show_progress_bar=True,
)
# Step 3: Retrieve top-k documents
scores = retriever.retrieve(
queries_embeddings=queries_embeddings,
k=10, # Retrieve the top 10 matches for each query
)
```
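The exact return format may vary across PyLate versions, but `retrieve` yields one ranked result list per query. Assuming each entry exposes `id` and `score` keys as in the PyLate documentation, you can iterate over the results like this (illustrative sketch continuing from the snippet above):
```python
# Illustrative only: each query gets a list of matches, assumed to be dicts with
# "id" and "score" keys, ordered by decreasing relevance.
queries = ["query for document 3", "query for document 1"]
for query, query_results in zip(queries, scores):
    print(query)
    for match in query_results:
        print(f"  document {match['id']}: score={match['score']:.2f}")
```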
### Reranking
If you only want to use the ColBERT model to perform reranking on top of your first-stage retrieval pipeline without building an index, you can simply use the `rank.rerank` function and pass the queries and documents to rerank:
```python
from pylate import rank, models
queries = [
"query A",
"query B",
]
documents = [
["document A", "document B"],
["document 1", "document C", "document B"],
]
documents_ids = [
[1, 2],
[1, 3, 2],
]
model = models.ColBERT(
model_name_or_path=pylate_model_id,
)
queries_embeddings = model.encode(
queries,
is_query=True,
)
documents_embeddings = model.encode(
documents,
is_query=False,
)
reranked_documents = rank.rerank(
documents_ids=documents_ids,
queries_embeddings=queries_embeddings,
documents_embeddings=documents_embeddings,
)
```
## Evaluation
### Metrics
#### BEIR Benchmark
GTE-ModernColBERT is the first model to outperform ColBERT-small on the BEIR benchmark.
As reproduction in the IR domain is challenging, we worked closely with Benjamin Clavié, the author of ColBERT-small, to reproduce its evaluation setup. Despite these efforts, and although the scores match closely on most datasets, some still differ slightly.
For this reason, we also report the results of ColBERT-small evaluated in the same setup we used for GTE-ModernColBERT, for completeness and a fair comparison.
| Model | Average | FiQA2018 | NFCorpus | TREC-COVID | Touche2020 | ArguAna | QuoraRetrieval | SCIDOCS | SciFact | NQ | ClimateFEVER | HotpotQA | DBPedia | CQADupstack | FEVER | MSMARCO |
|--------------------------|-------------|----------|----------|------------|------------|---------|----------------|---------|---------|-------|--------------|----------|---------|-------------|-------|---------|
| GTE-ModernColBERT | **54.89** | **48.51** | 37.93 | 83.59 | **31.23** | 48.51 | 86.61 | **19.06** | **76.34** | 61.8 | 30.62 | **77.32** | **48.03** | **41** | 87.44 | **45.32** |
| ColBERT-small (reported) | 53.79 | 41.15 | 37.3 | **84.59** | 25.69 | **50.09** | 87.72 | 18.42 | 74.77 | 59.1 | **33.07** | 76.11 | 45.58 | 38.75 | **90.96** | 43.5 |
| JinaColBERT-v2 | | 40.8 | 34.6 | 83.4 | 27.4 | 36.6 | **88.7** | 18.6 | 67.8 | **64** | 23.9 | 76.6 | 47.1 | | 80.5 | |
| ColBERT-small (rerun) | 53.35 | 41.01 | 36.86 | 83.14 | 24.95 | 46.76 | 87.89 | 18.72 | 74.02 | 59.42 | 32.83 | 76.88 | 46.36 | 39.36 | 88.66 | 43.44 |
#### PyLate Information Retrieval
* Datasets: `NanoClimateFEVER`, `NanoDBPedia`, `NanoFEVER`, `NanoFiQA2018`, `NanoHotpotQA`, `NanoMSMARCO`, `NanoNFCorpus`, `NanoNQ`, `NanoQuoraRetrieval`, `NanoSCIDOCS`, `NanoArguAna`, `NanoSciFact` and `NanoTouche2020`
* Evaluated with `pylate.evaluation.pylate_information_retrieval_evaluator.PyLateInformationRetrievalEvaluator`
| Metric | NanoClimateFEVER | NanoDBPedia | NanoFEVER | NanoFiQA2018 | NanoHotpotQA | NanoMSMARCO | NanoNFCorpus | NanoNQ | NanoQuoraRetrieval | NanoSCIDOCS | NanoArguAna | NanoSciFact | NanoTouche2020 |
|:--------------------|:-----------------|:------------|:-----------|:-------------|:-------------|:------------|:-------------|:-----------|:-------------------|:------------|:------------|:------------|:---------------|
| MaxSim_accuracy@1 | 0.36 | 0.88 | 0.92 | 0.56 | 0.92 | 0.54 | 0.56 | 0.64 | 0.96 | 0.48 | 0.3 | 0.74 | 0.7755 |
| MaxSim_accuracy@3 | 0.62 | 0.94 | 0.98 | 0.66 | 1.0 | 0.68 | 0.68 | 0.82 | 1.0 | 0.74 | 0.62 | 0.86 | 0.9388 |
| MaxSim_accuracy@5 | 0.78 | 0.96 | 0.98 | 0.74 | 1.0 | 0.74 | 0.74 | 0.86 | 1.0 | 0.78 | 0.7 | 0.9 | 0.9796 |
| MaxSim_accuracy@10 | 0.86 | 0.98 | 1.0 | 0.8 | 1.0 | 0.92 | 0.76 | 0.9 | 1.0 | 0.84 | 0.82 | 0.94 | 0.9796 |
| MaxSim_precision@1 | 0.36 | 0.88 | 0.92 | 0.56 | 0.92 | 0.54 | 0.56 | 0.64 | 0.96 | 0.48 | 0.3 | 0.74 | 0.7755 |
| MaxSim_precision@3 | 0.2333 | 0.7133 | 0.36 | 0.3267 | 0.58 | 0.2267 | 0.4333 | 0.2867 | 0.4 | 0.4 | 0.2067 | 0.3 | 0.6599 |
| MaxSim_precision@5 | 0.208 | 0.656 | 0.216 | 0.256 | 0.36 | 0.148 | 0.392 | 0.18 | 0.256 | 0.292 | 0.14 | 0.196 | 0.6571 |
| MaxSim_precision@10 | 0.128 | 0.572 | 0.11 | 0.152 | 0.186 | 0.092 | 0.304 | 0.1 | 0.134 | 0.194 | 0.082 | 0.104 | 0.5184 |
| MaxSim_recall@1 | 0.1833 | 0.118 | 0.8567 | 0.3092 | 0.46 | 0.54 | 0.0664 | 0.61 | 0.8473 | 0.1007 | 0.3 | 0.715 | 0.0518 |
| MaxSim_recall@3 | 0.289 | 0.2307 | 0.96 | 0.4784 | 0.87 | 0.68 | 0.102 | 0.78 | 0.9453 | 0.2467 | 0.62 | 0.83 | 0.1362 |
| MaxSim_recall@5 | 0.4157 | 0.2962 | 0.96 | 0.5752 | 0.9 | 0.74 | 0.1284 | 0.82 | 0.9693 | 0.2997 | 0.7 | 0.885 | 0.2193 |
| MaxSim_recall@10 | 0.4957 | 0.4146 | 0.98 | 0.6412 | 0.93 | 0.92 | 0.1566 | 0.88 | 0.9893 | 0.3967 | 0.82 | 0.93 | 0.334 |
| **MaxSim_ndcg@10** | **0.4148** | **0.7296** | **0.9452** | **0.567** | **0.9012** | **0.7089** | **0.3957** | **0.7645** | **0.9691** | **0.3987** | **0.5609** | **0.8372** | **0.5927** |
| MaxSim_mrr@10 | 0.5266 | 0.9169 | 0.9522 | 0.6359 | 0.96 | 0.6447 | 0.627 | 0.739 | 0.9767 | 0.6137 | 0.4775 | 0.8117 | 0.8629 |
| MaxSim_map@100 | 0.3347 | 0.5884 | 0.9271 | 0.5032 | 0.8592 | 0.6496 | 0.1918 | 0.7239 | 0.9552 | 0.3163 | 0.4824 | 0.8049 | 0.4257 |
#### Nano BEIR
* Dataset: `NanoBEIR_mean`
* Evaluated with `pylate.evaluation.nano_beir_evaluator.NanoBEIREvaluator`
| Metric | Value |
|:--------------------|:-----------|
| MaxSim_accuracy@1 | 0.6643 |
| MaxSim_accuracy@3 | 0.8107 |
| MaxSim_accuracy@5 | 0.8584 |
| MaxSim_accuracy@10 | 0.9077 |
| MaxSim_precision@1 | 0.6643 |
| MaxSim_precision@3 | 0.3943 |
| MaxSim_precision@5 | 0.3044 |
| MaxSim_precision@10 | 0.2059 |
| MaxSim_recall@1 | 0.3968 |
| MaxSim_recall@3 | 0.5514 |
| MaxSim_recall@5 | 0.6084 |
| MaxSim_recall@10 | 0.6837 |
| **MaxSim_ndcg@10** | **0.6758** |
| MaxSim_mrr@10 | 0.7496 |
| MaxSim_map@100 | 0.5971 |
## Training Details
### Training Hyperparameters
#### Non-Default Hyperparameters
- `eval_strategy`: steps
- `per_device_train_batch_size`: 16
- `learning_rate`: 3e-05
- `bf16`: True
#### All Hyperparameters
- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 8
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 3e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 3
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.0
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: True
- `fp16`: False
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 6
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: True
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`:
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: proportional
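The card does not include the training script itself. Below is a minimal sketch of a PyLate knowledge-distillation run consistent with the Distillation loss, the ms-marco-en-bge-gemma dataset, and the non-default hyperparameters listed above. It follows the PyLate distillation recipe (`losses.Distillation`, `utils.KDProcessing`, `utils.ColBERTCollator`); the dataset configuration names and split handling are assumptions and may differ across dataset and PyLate versions.
```python
from datasets import load_dataset
from sentence_transformers import (
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)

from pylate import losses, models, utils

# Assumed dataset layout: distillation triples plus the query/document texts they reference.
train = load_dataset("lightonai/ms-marco-en-bge-gemma", "train", split="train")
queries = load_dataset("lightonai/ms-marco-en-bge-gemma", "queries", split="train")
documents = load_dataset("lightonai/ms-marco-en-bge-gemma", "documents", split="train")

# Resolve query/document ids to their texts on the fly for the distillation loss.
train.set_transform(utils.KDProcessing(queries=queries, documents=documents).transform)

model = models.ColBERT(model_name_or_path="Alibaba-NLP/gte-modernbert-base")

# Mirrors the non-default hyperparameters above; the actual run also used
# eval_strategy="steps" with an evaluation set, omitted here for brevity.
args = SentenceTransformerTrainingArguments(
    output_dir="output/gte-moderncolbert",
    num_train_epochs=3,
    per_device_train_batch_size=16,
    learning_rate=3e-5,
    bf16=True,
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train,
    loss=losses.Distillation(model=model),
    data_collator=utils.ColBERTCollator(model.tokenize),
)
trainer.train()
```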
### Training Logs
| Epoch | Step | Training Loss | NanoClimateFEVER_MaxSim_ndcg@10 | NanoDBPedia_MaxSim_ndcg@10 | NanoFEVER_MaxSim_ndcg@10 | NanoFiQA2018_MaxSim_ndcg@10 | NanoHotpotQA_MaxSim_ndcg@10 | NanoMSMARCO_MaxSim_ndcg@10 | NanoNFCorpus_MaxSim_ndcg@10 | NanoNQ_MaxSim_ndcg@10 | NanoQuoraRetrieval_MaxSim_ndcg@10 | NanoSCIDOCS_MaxSim_ndcg@10 | NanoArguAna_MaxSim_ndcg@10 | NanoSciFact_MaxSim_ndcg@10 | NanoTouche2020_MaxSim_ndcg@10 | NanoBEIR_mean_MaxSim_ndcg@10 |
|:------:|:-----:|:-------------:|:-------------------------------:|:--------------------------:|:------------------------:|:---------------------------:|:---------------------------:|:--------------------------:|:---------------------------:|:---------------------:|:---------------------------------:|:--------------------------:|:--------------------------:|:--------------------------:|:-----------------------------:|:----------------------------:|
| 0.004 | 20 | 0.0493 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.008 | 40 | 0.0434 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.012 | 60 | 0.0324 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.016 | 80 | 0.0238 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.02 | 100 | 0.0202 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.024 | 120 | 0.0186 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.028 | 140 | 0.0172 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.032 | 160 | 0.0164 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.036 | 180 | 0.0157 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.04 | 200 | 0.0153 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.044 | 220 | 0.0145 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.048 | 240 | 0.014 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.052 | 260 | 0.0138 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.056 | 280 | 0.0135 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.06 | 300 | 0.0132 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.064 | 320 | 0.0129 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.068 | 340 | 0.0126 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.072 | 360 | 0.0123 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.076 | 380 | 0.0122 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.08 | 400 | 0.012 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.084 | 420 | 0.0121 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.088 | 440 | 0.0115 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.092 | 460 | 0.0113 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.096 | 480 | 0.0112 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.1 | 500 | 0.0111 | 0.3085 | 0.6309 | 0.9206 | 0.5303 | 0.8618 | 0.6893 | 0.3703 | 0.7163 | 0.9548 | 0.3885 | 0.4682 | 0.7930 | 0.5982 | 0.6331 |
| 0.104 | 520 | 0.0109 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.108 | 540 | 0.0109 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.112 | 560 | 0.0109 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.116 | 580 | 0.0105 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.12 | 600 | 0.0102 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.124 | 620 | 0.0104 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.128 | 640 | 0.0103 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.132 | 660 | 0.01 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.136 | 680 | 0.0101 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.14 | 700 | 0.0098 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.144 | 720 | 0.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.148 | 740 | 0.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.152 | 760 | 0.0096 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.156 | 780 | 0.0096 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.16 | 800 | 0.0094 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.164 | 820 | 0.0096 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.168 | 840 | 0.0095 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.172 | 860 | 0.0093 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.176 | 880 | 0.0092 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.18 | 900 | 0.0093 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.184 | 920 | 0.009 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.188 | 940 | 0.009 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.192 | 960 | 0.0089 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.196 | 980 | 0.0089 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.2 | 1000 | 0.0089 | 0.3148 | 0.6586 | 0.9335 | 0.5374 | 0.8810 | 0.6805 | 0.3746 | 0.7368 | 0.9486 | 0.3955 | 0.4824 | 0.8219 | 0.6089 | 0.6442 |
| 0.204 | 1020 | 0.0088 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.208 | 1040 | 0.0089 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.212 | 1060 | 0.0088 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.216 | 1080 | 0.0086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.22 | 1100 | 0.0087 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.224 | 1120 | 0.0088 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.228 | 1140 | 0.0086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.232 | 1160 | 0.0086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.236 | 1180 | 0.0084 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.24 | 1200 | 0.0086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.244 | 1220 | 0.0085 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.248 | 1240 | 0.0084 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.252 | 1260 | 0.0084 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.256 | 1280 | 0.0081 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.26 | 1300 | 0.0083 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.264 | 1320 | 0.0084 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.268 | 1340 | 0.0082 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.272 | 1360 | 0.0082 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.276 | 1380 | 0.008 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.28 | 1400 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.284 | 1420 | 0.0079 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.288 | 1440 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.292 | 1460 | 0.0081 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.296 | 1480 | 0.0081 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.3 | 1500 | 0.0079 | 0.3510 | 0.6590 | 0.9285 | 0.5463 | 0.8893 | 0.6853 | 0.3800 | 0.7370 | 0.9513 | 0.3980 | 0.5268 | 0.8268 | 0.6130 | 0.6533 |
| 0.304 | 1520 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.308 | 1540 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.312 | 1560 | 0.0077 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.316 | 1580 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.32 | 1600 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.324 | 1620 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.328 | 1640 | 0.0078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.332 | 1660 | 0.0076 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.336 | 1680 | 0.0076 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.34 | 1700 | 0.0077 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.344 | 1720 | 0.0076 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.348 | 1740 | 0.0074 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.352 | 1760 | 0.0074 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.356 | 1780 | 0.0075 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.36 | 1800 | 0.0076 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.364 | 1820 | 0.0075 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.368 | 1840 | 0.0073 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.372 | 1860 | 0.0075 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.376 | 1880 | 0.0073 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.38 | 1900 | 0.0074 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.384 | 1920 | 0.0072 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.388 | 1940 | 0.0072 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.392 | 1960 | 0.0071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.396 | 1980 | 0.0073 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.4 | 2000 | 0.0071 | 0.3551 | 0.6807 | 0.9311 | 0.5340 | 0.8951 | 0.7019 | 0.3767 | 0.7460 | 0.9559 | 0.3912 | 0.5121 | 0.8245 | 0.6058 | 0.6546 |
| 0.404 | 2020 | 0.0073 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.408 | 2040 | 0.0072 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.412 | 2060 | 0.0071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.416 | 2080 | 0.0073 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.42 | 2100 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.424 | 2120 | 0.0071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.428 | 2140 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.432 | 2160 | 0.0071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.436 | 2180 | 0.0071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.44 | 2200 | 0.007 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.444 | 2220 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.448 | 2240 | 0.0071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.452 | 2260 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.456 | 2280 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.46 | 2300 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.464 | 2320 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.468 | 2340 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.472 | 2360 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.476 | 2380 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.48 | 2400 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.484 | 2420 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.488 | 2440 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.492 | 2460 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.496 | 2480 | 0.0069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.5 | 2500 | 0.0068 | 0.3647 | 0.6883 | 0.9435 | 0.5624 | 0.8946 | 0.7065 | 0.3815 | 0.7709 | 0.9658 | 0.3993 | 0.5631 | 0.8371 | 0.6076 | 0.6681 |
| 0.504 | 2520 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.508 | 2540 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.512 | 2560 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.516 | 2580 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.52 | 2600 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.524 | 2620 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.528 | 2640 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.532 | 2660 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.536 | 2680 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.54 | 2700 | 0.0068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.544 | 2720 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.548 | 2740 | 0.0067 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.552 | 2760 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.556 | 2780 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.56 | 2800 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.564 | 2820 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.568 | 2840 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.572 | 2860 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.576 | 2880 | 0.0065 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.58 | 2900 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.584 | 2920 | 0.0065 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.588 | 2940 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.592 | 2960 | 0.0066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.596 | 2980 | 0.0065 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.6 | 3000 | 0.0064 | 0.3585 | 0.7081 | 0.9409 | 0.5474 | 0.8915 | 0.7037 | 0.3796 | 0.7763 | 0.9540 | 0.4038 | 0.5628 | 0.8424 | 0.6042 | 0.6672 |
| 0.604 | 3020 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.608 | 3040 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.612 | 3060 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.616 | 3080 | 0.0065 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.62 | 3100 | 0.0065 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.624 | 3120 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.628 | 3140 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.632 | 3160 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.636 | 3180 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.64 | 3200 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.644 | 3220 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.648 | 3240 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.652 | 3260 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.656 | 3280 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.66 | 3300 | 0.0064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.664 | 3320 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.668 | 3340 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.672 | 3360 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.676 | 3380 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.68 | 3400 | 0.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.684 | 3420 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.688 | 3440 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.692 | 3460 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.696 | 3480 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.7 | 3500 | 0.0061 | 0.3783 | 0.7080 | 0.9441 | 0.5603 | 0.8902 | 0.7022 | 0.3824 | 0.7780 | 0.9612 | 0.3995 | 0.5414 | 0.8450 | 0.6049 | 0.6689 |
| 0.704 | 3520 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.708 | 3540 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.712 | 3560 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.716 | 3580 | 0.0062 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.72 | 3600 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.724 | 3620 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.728 | 3640 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.732 | 3660 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.736 | 3680 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.74 | 3700 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.744 | 3720 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.748 | 3740 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.752 | 3760 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.756 | 3780 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.76 | 3800 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.764 | 3820 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.768 | 3840 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.772 | 3860 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.776 | 3880 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.78 | 3900 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.784 | 3920 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.788 | 3940 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.792 | 3960 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.796 | 3980 | 0.0061 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.8 | 4000 | 0.0059 | 0.3820 | 0.7028 | 0.9441 | 0.5722 | 0.8890 | 0.7135 | 0.3825 | 0.7790 | 0.9659 | 0.4012 | 0.5425 | 0.8446 | 0.6085 | 0.6714 |
| 0.804 | 4020 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.808 | 4040 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.812 | 4060 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.816 | 4080 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.82 | 4100 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.824 | 4120 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.828 | 4140 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.832 | 4160 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.836 | 4180 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.84 | 4200 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.844 | 4220 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.848 | 4240 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.852 | 4260 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.856 | 4280 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.86 | 4300 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.864 | 4320 | 0.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.868 | 4340 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.872 | 4360 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.876 | 4380 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.88 | 4400 | 0.0059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.884 | 4420 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.888 | 4440 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.892 | 4460 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.896 | 4480 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.9 | 4500 | 0.0059 | 0.3703 | 0.7111 | 0.9441 | 0.5555 | 0.8886 | 0.7251 | 0.3934 | 0.7632 | 0.9671 | 0.4052 | 0.5390 | 0.8442 | 0.6068 | 0.6703 |
| 0.904 | 4520 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.908 | 4540 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.912 | 4560 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.916 | 4580 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.92 | 4600 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.924 | 4620 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.928 | 4640 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.932 | 4660 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.936 | 4680 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.94 | 4700 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.944 | 4720 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.948 | 4740 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.952 | 4760 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.956 | 4780 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.96 | 4800 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.964 | 4820 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.968 | 4840 | 0.0058 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.972 | 4860 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.976 | 4880 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.98 | 4900 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.984 | 4920 | 0.0057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.988 | 4940 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.992 | 4960 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 0.996 | 4980 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.0 | 5000 | 0.0056 | 0.3760 | 0.7131 | 0.9441 | 0.5522 | 0.8882 | 0.7157 | 0.3980 | 0.7739 | 0.9755 | 0.3987 | 0.5492 | 0.8501 | 0.5990 | 0.6718 |
| 1.004 | 5020 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.008 | 5040 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.012 | 5060 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.016 | 5080 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.02 | 5100 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.024 | 5120 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.028 | 5140 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.032 | 5160 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.036 | 5180 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.04 | 5200 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.044 | 5220 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.048 | 5240 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.052 | 5260 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.056 | 5280 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.06 | 5300 | 0.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.064 | 5320 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.068 | 5340 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.072 | 5360 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.076 | 5380 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.08 | 5400 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.084 | 5420 | 0.0055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.088 | 5440 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.092 | 5460 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.096 | 5480 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.1 | 5500 | 0.0054 | 0.3777 | 0.7109 | 0.9367 | 0.5705 | 0.8919 | 0.7136 | 0.3956 | 0.7750 | 0.9590 | 0.3947 | 0.5336 | 0.8368 | 0.6016 | 0.6690 |
| 1.104 | 5520 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.108 | 5540 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.112 | 5560 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.116 | 5580 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.12 | 5600 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.124 | 5620 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.1280 | 5640 | 0.0054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.1320 | 5660 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.1360 | 5680 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.1400 | 5700 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.144 | 5720 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.148 | 5740 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.152 | 5760 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.156 | 5780 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.16 | 5800 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.164 | 5820 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.168 | 5840 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.172 | 5860 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.176 | 5880 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.18 | 5900 | 0.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.184 | 5920 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.188 | 5940 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.192 | 5960 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.196 | 5980 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.2 | 6000 | 0.0051 | 0.3998 | 0.7171 | 0.9446 | 0.5699 | 0.8899 | 0.7194 | 0.4022 | 0.7631 | 0.9674 | 0.3960 | 0.5395 | 0.8389 | 0.6025 | 0.6731 |
| 1.204 | 6020 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.208 | 6040 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.212 | 6060 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.216 | 6080 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.22 | 6100 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.224 | 6120 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.228 | 6140 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.232 | 6160 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.236 | 6180 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.24 | 6200 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.244 | 6220 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.248 | 6240 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.252 | 6260 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.256 | 6280 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.26 | 6300 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.264 | 6320 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.268 | 6340 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.272 | 6360 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.276 | 6380 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.28 | 6400 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.284 | 6420 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.288 | 6440 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.292 | 6460 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.296 | 6480 | 0.0052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3 | 6500 | 0.005 | 0.4047 | 0.7137 | 0.9443 | 0.5690 | 0.8998 | 0.7120 | 0.3963 | 0.7689 | 0.9829 | 0.3956 | 0.5504 | 0.8363 | 0.5999 | 0.6749 |
| 1.304 | 6520 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.308 | 6540 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.312 | 6560 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.316 | 6580 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.32 | 6600 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.324 | 6620 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.328 | 6640 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.332 | 6660 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.336 | 6680 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.34 | 6700 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3440 | 6720 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3480 | 6740 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3520 | 6760 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3560 | 6780 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3600 | 6800 | 0.0051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3640 | 6820 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3680 | 6840 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.3720 | 6860 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.376 | 6880 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.38 | 6900 | 0.005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.384 | 6920 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.388 | 6940 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.392 | 6960 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.396 | 6980 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.4 | 7000 | 0.0049 | 0.4084 | 0.7156 | 0.9441 | 0.5700 | 0.8978 | 0.7134 | 0.4024 | 0.7557 | 0.9758 | 0.3997 | 0.5521 | 0.8366 | 0.5919 | 0.6741 |
| 1.404 | 7020 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.408 | 7040 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.412 | 7060 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.416 | 7080 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.42 | 7100 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.424 | 7120 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.428 | 7140 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.432 | 7160 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.436 | 7180 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.44 | 7200 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.444 | 7220 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.448 | 7240 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.452 | 7260 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.456 | 7280 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.46 | 7300 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.464 | 7320 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.468 | 7340 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.472 | 7360 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.476 | 7380 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.48 | 7400 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.484 | 7420 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.488 | 7440 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.492 | 7460 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.496 | 7480 | 0.0049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.5 | 7500 | 0.0047 | 0.4125 | 0.7186 | 0.9443 | 0.5723 | 0.8974 | 0.6941 | 0.3962 | 0.7676 | 0.9677 | 0.3990 | 0.5455 | 0.8433 | 0.5945 | 0.6733 |
| 1.504 | 7520 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.508 | 7540 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.512 | 7560 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.516 | 7580 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.52 | 7600 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.524 | 7620 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.528 | 7640 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.532 | 7660 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.536 | 7680 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.54 | 7700 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.544 | 7720 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.548 | 7740 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.552 | 7760 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.556 | 7780 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.56 | 7800 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.564 | 7820 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.568 | 7840 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.572 | 7860 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.576 | 7880 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.58 | 7900 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.584 | 7920 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.588 | 7940 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.592 | 7960 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.596 | 7980 | 0.0048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6 | 8000 | 0.0046 | 0.4036 | 0.7208 | 0.9441 | 0.5737 | 0.8961 | 0.7160 | 0.3942 | 0.7609 | 0.9715 | 0.3936 | 0.5534 | 0.8419 | 0.6009 | 0.6747 |
| 1.604 | 8020 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.608 | 8040 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.612 | 8060 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.616 | 8080 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.62 | 8100 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.624 | 8120 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6280 | 8140 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6320 | 8160 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6360 | 8180 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6400 | 8200 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6440 | 8220 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6480 | 8240 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6520 | 8260 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6560 | 8280 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6600 | 8300 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6640 | 8320 | 0.0047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6680 | 8340 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6720 | 8360 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6760 | 8380 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6800 | 8400 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.6840 | 8420 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.688 | 8440 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.692 | 8460 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.696 | 8480 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.7 | 8500 | 0.0046 | 0.4047 | 0.7210 | 0.9443 | 0.5729 | 0.8953 | 0.7038 | 0.3987 | 0.7716 | 0.9685 | 0.3912 | 0.5620 | 0.8444 | 0.6059 | 0.6757 |
| 1.704 | 8520 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.708 | 8540 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.712 | 8560 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.716 | 8580 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.72 | 8600 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.724 | 8620 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.728 | 8640 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.732 | 8660 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.736 | 8680 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.74 | 8700 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.744 | 8720 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.748 | 8740 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.752 | 8760 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.756 | 8780 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.76 | 8800 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.764 | 8820 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.768 | 8840 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.772 | 8860 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.776 | 8880 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.78 | 8900 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.784 | 8920 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.788 | 8940 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.792 | 8960 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.796 | 8980 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8 | 9000 | 0.0045 | 0.4126 | 0.7250 | 0.9443 | 0.5742 | 0.8927 | 0.7178 | 0.3952 | 0.7688 | 0.9681 | 0.4031 | 0.5558 | 0.8451 | 0.6057 | 0.6776 |
| 1.804 | 9020 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.808 | 9040 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.812 | 9060 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8160 | 9080 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8200 | 9100 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8240 | 9120 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8280 | 9140 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8320 | 9160 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8360 | 9180 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8400 | 9200 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8440 | 9220 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8480 | 9240 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8520 | 9260 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8560 | 9280 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8600 | 9300 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8640 | 9320 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8680 | 9340 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.8720 | 9360 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.876 | 9380 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.88 | 9400 | 0.0046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.884 | 9420 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.888 | 9440 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.892 | 9460 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.896 | 9480 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.9 | 9500 | 0.0045 | 0.4140 | 0.7249 | 0.9452 | 0.5728 | 0.8944 | 0.7147 | 0.3917 | 0.7648 | 0.9679 | 0.4018 | 0.5640 | 0.8311 | 0.6013 | 0.6760 |
| 1.904 | 9520 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.908 | 9540 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.912 | 9560 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.916 | 9580 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.92 | 9600 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.924 | 9620 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.928 | 9640 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.932 | 9660 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.936 | 9680 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.94 | 9700 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.944 | 9720 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.948 | 9740 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.952 | 9760 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.956 | 9780 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.96 | 9800 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.964 | 9820 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.968 | 9840 | 0.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.972 | 9860 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.976 | 9880 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.98 | 9900 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.984 | 9920 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.988 | 9940 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.992 | 9960 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 1.996 | 9980 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.0 | 10000 | 0.0044 | 0.4098 | 0.7192 | 0.9443 | 0.5594 | 0.8970 | 0.7056 | 0.3964 | 0.7729 | 0.9709 | 0.4013 | 0.5623 | 0.8414 | 0.5960 | 0.6751 |
| 2.004 | 10020 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.008 | 10040 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.012 | 10060 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.016 | 10080 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.02 | 10100 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.024 | 10120 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.028 | 10140 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.032 | 10160 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.036 | 10180 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.04 | 10200 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.044 | 10220 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.048 | 10240 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.052 | 10260 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.056 | 10280 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.06 | 10300 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.064 | 10320 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.068 | 10340 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.072 | 10360 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.076 | 10380 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.08 | 10400 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.084 | 10420 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.088 | 10440 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.092 | 10460 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.096 | 10480 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.1 | 10500 | 0.0043 | 0.4131 | 0.7248 | 0.9443 | 0.5673 | 0.9022 | 0.7101 | 0.3982 | 0.7736 | 0.9652 | 0.4000 | 0.5649 | 0.8322 | 0.6020 | 0.6768 |
| 2.104 | 10520 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.108 | 10540 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.112 | 10560 | 0.0044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.116 | 10580 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.12 | 10600 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.124 | 10620 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.128 | 10640 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.132 | 10660 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.136 | 10680 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.14 | 10700 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.144 | 10720 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.148 | 10740 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.152 | 10760 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.156 | 10780 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.16 | 10800 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.164 | 10820 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.168 | 10840 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.172 | 10860 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.176 | 10880 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.18 | 10900 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.184 | 10920 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.188 | 10940 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.192 | 10960 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.196 | 10980 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2 | 11000 | 0.0042 | 0.4191 | 0.7187 | 0.9443 | 0.5652 | 0.9001 | 0.7071 | 0.4007 | 0.7631 | 0.9605 | 0.3964 | 0.5631 | 0.8363 | 0.5962 | 0.6747 |
| 2.204 | 11020 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.208 | 11040 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.212 | 11060 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.216 | 11080 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.22 | 11100 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.224 | 11120 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.228 | 11140 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.232 | 11160 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2360 | 11180 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.24 | 11200 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2440 | 11220 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.248 | 11240 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.252 | 11260 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2560 | 11280 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.26 | 11300 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2640 | 11320 | 0.0043 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.268 | 11340 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2720 | 11360 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.276 | 11380 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.2800 | 11400 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.284 | 11420 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.288 | 11440 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.292 | 11460 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.296 | 11480 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.3 | 11500 | 0.0041 | 0.4129 | 0.7232 | 0.9443 | 0.5662 | 0.9020 | 0.7100 | 0.3936 | 0.7655 | 0.9750 | 0.3956 | 0.5633 | 0.8368 | 0.5952 | 0.6757 |
| 2.304 | 11520 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.308 | 11540 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.312 | 11560 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.316 | 11580 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.32 | 11600 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.324 | 11620 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.328 | 11640 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.332 | 11660 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.336 | 11680 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.34 | 11700 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.344 | 11720 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.348 | 11740 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.352 | 11760 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.356 | 11780 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.36 | 11800 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.364 | 11820 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.368 | 11840 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.372 | 11860 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.376 | 11880 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.38 | 11900 | 0.0042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.384 | 11920 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.388 | 11940 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.392 | 11960 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.396 | 11980 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.4 | 12000 | 0.0041 | 0.4152 | 0.7204 | 0.9443 | 0.5601 | 0.8967 | 0.7104 | 0.3978 | 0.7688 | 0.9751 | 0.3918 | 0.5609 | 0.8368 | 0.5988 | 0.6752 |
| 2.404 | 12020 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.408 | 12040 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.412 | 12060 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.416 | 12080 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.42 | 12100 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.424 | 12120 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.428 | 12140 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.432 | 12160 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.436 | 12180 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.44 | 12200 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.444 | 12220 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.448 | 12240 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.452 | 12260 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.456 | 12280 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.46 | 12300 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.464 | 12320 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.468 | 12340 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.472 | 12360 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.476 | 12380 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.48 | 12400 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.484 | 12420 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.488 | 12440 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.492 | 12460 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.496 | 12480 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.5 | 12500 | 0.004 | 0.4194 | 0.7222 | 0.9443 | 0.5677 | 0.9031 | 0.7103 | 0.3955 | 0.7726 | 0.9708 | 0.3966 | 0.5573 | 0.8380 | 0.5966 | 0.6765 |
| 2.504 | 12520 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.508 | 12540 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.512 | 12560 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.516 | 12580 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.52 | 12600 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.524 | 12620 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.528 | 12640 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.532 | 12660 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.536 | 12680 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.54 | 12700 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.544 | 12720 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.548 | 12740 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.552 | 12760 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.556 | 12780 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.56 | 12800 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.564 | 12820 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.568 | 12840 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.572 | 12860 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.576 | 12880 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.58 | 12900 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.584 | 12920 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.588 | 12940 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.592 | 12960 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.596 | 12980 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.6 | 13000 | 0.004 | 0.4201 | 0.7257 | 0.9443 | 0.5676 | 0.9012 | 0.7103 | 0.3984 | 0.7577 | 0.9705 | 0.4011 | 0.5609 | 0.8366 | 0.5990 | 0.6764 |
| 2.604 | 13020 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.608 | 13040 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.612 | 13060 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.616 | 13080 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.62 | 13100 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.624 | 13120 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.628 | 13140 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.632 | 13160 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.636 | 13180 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.64 | 13200 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.644 | 13220 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.648 | 13240 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.652 | 13260 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.656 | 13280 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.66 | 13300 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.664 | 13320 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.668 | 13340 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.672 | 13360 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.676 | 13380 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.68 | 13400 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.684 | 13420 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.6880 | 13440 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.692 | 13460 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.6960 | 13480 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7 | 13500 | 0.004 | 0.4158 | 0.7244 | 0.9452 | 0.5662 | 0.9012 | 0.7042 | 0.3966 | 0.7709 | 0.9705 | 0.3919 | 0.5640 | 0.8370 | 0.5941 | 0.6755 |
| 2.7040 | 13520 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.708 | 13540 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7120 | 13560 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.716 | 13580 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7200 | 13600 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.724 | 13620 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7280 | 13640 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.732 | 13660 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7360 | 13680 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.74 | 13700 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7440 | 13720 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.748 | 13740 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.752 | 13760 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7560 | 13780 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.76 | 13800 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7640 | 13820 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.768 | 13840 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7720 | 13860 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.776 | 13880 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7800 | 13900 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.784 | 13920 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7880 | 13940 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.792 | 13960 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.7960 | 13980 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.8 | 14000 | 0.0039 | 0.4230 | 0.7262 | 0.9443 | 0.5669 | 0.9028 | 0.7100 | 0.3930 | 0.7645 | 0.9750 | 0.3998 | 0.5635 | 0.8366 | 0.5975 | 0.6772 |
| 2.8040 | 14020 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.808 | 14040 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.8120 | 14060 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.816 | 14080 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.82 | 14100 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.824 | 14120 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.828 | 14140 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.832 | 14160 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.836 | 14180 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.84 | 14200 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.844 | 14220 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.848 | 14240 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.852 | 14260 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.856 | 14280 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.86 | 14300 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.864 | 14320 | 0.0041 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.868 | 14340 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.872 | 14360 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.876 | 14380 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.88 | 14400 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.884 | 14420 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.888 | 14440 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.892 | 14460 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.896 | 14480 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.9 | 14500 | 0.004 | 0.4177 | 0.7296 | 0.9452 | 0.5663 | 0.9012 | 0.7095 | 0.3917 | 0.7645 | 0.9708 | 0.3985 | 0.5609 | 0.8369 | 0.5952 | 0.6760 |
| 2.904 | 14520 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.908 | 14540 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.912 | 14560 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.916 | 14580 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.92 | 14600 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.924 | 14620 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.928 | 14640 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.932 | 14660 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.936 | 14680 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.94 | 14700 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.944 | 14720 | 0.0038 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.948 | 14740 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.952 | 14760 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.956 | 14780 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.96 | 14800 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.964 | 14820 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.968 | 14840 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.972 | 14860 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.976 | 14880 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.98 | 14900 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.984 | 14920 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.988 | 14940 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.992 | 14960 | 0.0039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 2.996 | 14980 | 0.004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
| 3.0 | 15000 | 0.0039 | 0.4148 | 0.7296 | 0.9452 | 0.5670 | 0.9012 | 0.7089 | 0.3957 | 0.7645 | 0.9691 | 0.3987 | 0.5609 | 0.8372 | 0.5927 | 0.6758 |
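The log above records the distillation loss every 20 steps and, every 500 steps, the NDCG@10 on each NanoBEIR subset listed in the model metadata, with their mean in the final column. If you prefer a plot to the raw rows, the sketch below is one way to parse the table. It is illustrative only: it assumes the rows (header included) were copied verbatim into a local `training_log.md`, that pandas and matplotlib are installed, and that the header uses the labels `Step` and `Training Loss`; adjust the names if your copy differs.
```python
# Hedged sketch (not part of the original training pipeline): parse the markdown
# training log above and plot the loss curve. Assumes the table, header row
# included, has been saved verbatim to "training_log.md".
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv("training_log.md", sep="|", engine="python")
df = df.iloc[:, 1:-1]                                   # drop empty columns created by the leading/trailing "|"
df.columns = [c.strip() for c in df.columns]
df = df.apply(lambda col: col.astype(str).str.strip())  # normalise whitespace in every cell
df = df[~df.iloc[:, 0].str.match(r"^[:\-]+$")]          # drop the |---|---| separator row
df = df.apply(pd.to_numeric, errors="coerce")           # "-" placeholders become NaN

# Column labels are assumed from the table header; adjust if they differ.
plt.plot(df["Step"], df["Training Loss"])
plt.xlabel("Step")
plt.ylabel("Training loss")
plt.savefig("training_loss.png")
```
Rows between evaluation steps hold NaN in the metric columns, so `df.dropna()` keeps only the 500-step evaluation checkpoints if you want to plot those instead.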
### Framework Versions
- Python: 3.11.10
- Sentence Transformers: 3.5.0.dev0
- Transformers: 4.48.2
- PyTorch: 2.5.1+cu124
- Accelerate: 1.1.1
- Datasets: 2.21.0
- Tokenizers: 0.21.0
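Exact version matches are not required to load the model, but when debugging behavioural differences it can help to compare a local environment against the versions above. The following is a minimal, standard-library sketch (the version strings are copied from the list above; the helper itself is illustrative, not part of the released code):
```python
# Optional helper: compare locally installed package versions against the
# versions this model card reports for training.
from importlib.metadata import version, PackageNotFoundError

# Versions copied from the "Framework Versions" list above.
expected = {
    "sentence-transformers": "3.5.0.dev0",
    "transformers": "4.48.2",
    "torch": "2.5.1+cu124",  # a CPU-only build reports e.g. "2.5.1" without the +cu124 suffix
    "accelerate": "1.1.1",
    "datasets": "2.21.0",
    "tokenizers": "0.21.0",
}

for package, trained_with in expected.items():
    try:
        installed = version(package)
    except PackageNotFoundError:
        installed = "not installed"
    marker = "OK" if installed == trained_with else "!!"
    print(f"{marker} {package}: installed={installed}, trained_with={trained_with}")
```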
## Citation
### BibTeX
#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "https://arxiv.org/abs/1908.10084"
}
```
#### PyLate
```bibtex
@misc{PyLate,
    title={PyLate: Flexible Training and Retrieval for Late Interaction Models},
    author={Chaffin, Antoine and Sourty, Raphaël},
    url={https://github.com/lightonai/pylate},
    year={2024}
}
```
#### GTE-ModernColBERT
```bibtex
@misc{GTE-ModernColBERT,
    title={GTE-ModernColBERT},
    author={Chaffin, Antoine},
    url={https://huggingface.co/lightonai/GTE-ModernColBERT-v1},
    year={2025}
}
```