From bae6e931bdfc9334620e0e46c1bff215c54a22b2 Mon Sep 17 00:00:00 2001 From: Krzysztof Kudrynski Date: Tue, 13 Aug 2019 23:27:54 +0200 Subject: [PATCH] updating BERT (single node LAMB support) --- PyTorch/LanguageModeling/BERT/Dockerfile | 27 +- PyTorch/LanguageModeling/BERT/README.md | 662 +- PyTorch/LanguageModeling/BERT/bind_pyt.py | 124 + PyTorch/LanguageModeling/BERT/config_DGX1.sh | 21 + PyTorch/LanguageModeling/BERT/config_DGX2.sh | 21 + .../BERT/config_DGX2_16x16x64x4_wiki_books.sh | 19 + ...onfig_DGX2_16x16x8x16_wiki_books_phase2.sh | 21 + ...onfig_DGX2_1x16x8x256_wiki_books_phase2.sh | 21 + .../BERT/config_DGX2_2x16x40.sh | 28 + .../config_DGX2_46x16x32x3_wiki_books_fp32.sh | 22 + .../BERT/config_DGX2_46x16x48x2_wiki_books.sh | 19 + ..._DGX2_46x16x4x12_wiki_books_phase2_fp32.sh | 21 + ...config_DGX2_46x16x8x6_wiki_books_phase2.sh | 21 + .../BERT/config_DGX2_48x16x64x2_wiki_books.sh | 19 + ...config_DGX2_48x16x8x8_wiki_books_phase2.sh | 19 + .../config_DGX2_64x16x32x2_fp32_wiki_books.sh | 21 + ...g_DGX2_64x16x4x8_wiki_books_phase2_fp32.sh | 21 + .../BERT/config_DGX2_64x16x64_wiki_books.sh | 19 + ...config_DGX2_64x16x8x4_wiki_books_phase2.sh | 21 + ...config_DGX2_64x16x8x8_wiki_books_phase2.sh | 19 + .../BERT/config_DGX2_92x16x48_wiki_books.sh | 21 + .../BERT/config_DGX2_92x16x64_wiki_books.sh | 21 + ...config_DGX2_92x16x8x3_wiki_books_phase2.sh | 22 + ...config_DGX2_92x16x8x4_wiki_books_phase2.sh | 21 + .../BERT/config_DGX2_96x16x40_wiki_books.sh | 19 + .../BERT/config_DGX2_96x16x64_wiki_books.sh | 19 + ...config_DGX2_96x16x8x3_wiki_books_phase2.sh | 19 + ...config_DGX2_96x16x8x4_wiki_books_phase2.sh | 19 + .../BERT/config_DGX2_phase2.sh | 19 + .../BERT/data/BooksDownloader.py | 16 + .../BERT/data/BookscorpusTextFormatting.py | 21 + .../LanguageModeling/BERT/data/Downloader.py | 80 + .../data/GooglePretrainedWeightDownloader.py | 147 + .../BERT/data/MRPCDownloader.py | 33 + .../data/NVIDIAPretrainedWeightDownloader.py | 16 + .../BERT/data/SquadDownloader.py | 43 + .../BERT/data/TextSharding.py | 316 + .../BERT/data/WikiDownloader.py | 58 + .../BERT/data/WikicorpusTextFormatting.py | 35 + .../LanguageModeling/BERT/data/__init__.py | 0 .../LanguageModeling/BERT/data/bertPrep.py | 345 + .../data/bookcorpus/clean_and_merge_text.py | 23 - .../data/bookcorpus/download_bookcorpus.sh | 9 - .../BERT/data/create_datasets_from_start.sh | 45 +- .../data/merge_datasets_after_creation.sh | 29 - .../BERT/data/utils/config.sh | 24 - .../BERT/data/utils/create_mixed_dataset.py | 160 - .../data/utils/create_mixed_dataset_ids.py | 134 - .../BERT/data/utils/preprocessing.sh | 23 - .../data/utils/preprocessing_xargs_wrapper.sh | 15 - .../data/utils/sentence_segmentation_nltk.py | 28 - .../BERT/data/utils/shard_text_input_file.py | 47 - .../wikipedia_corpus/download_wikipedia.sh | 30 - .../wikipedia_corpus/remove_tags_and_clean.py | 39 - .../LanguageModeling/BERT/fused_adam_local.py | 205 - PyTorch/LanguageModeling/BERT/modeling.py | 114 +- PyTorch/LanguageModeling/BERT/optimization.py | 280 +- PyTorch/LanguageModeling/BERT/run.sub | 203 + PyTorch/LanguageModeling/BERT/run_and_time.sh | 39 + .../LanguageModeling/BERT/run_pretraining.py | 557 +- .../BERT/run_pretraining_inference.py | 29 +- PyTorch/LanguageModeling/BERT/run_squad.py | 9 +- .../BERT/scripts/docker/build.sh | 3 +- .../BERT/scripts/docker/launch.sh | 17 +- .../BERT/scripts/run_pretraining.sh | 172 +- .../BERT/scripts/run_pretraining_inference.sh | 49 +- .../BERT/scripts/run_squad.sh | 2 +- .../BERT/scripts/start_pretraining.sh | 2 +- 
PyTorch/LanguageModeling/BERT/utils.py | 12 + .../BERT/vocab/download_models.py | 123 - PyTorch/LanguageModeling/BERT/vocab/vocab | 30522 ---------------- 71 files changed, 3302 insertions(+), 32098 deletions(-) create mode 100644 PyTorch/LanguageModeling/BERT/bind_pyt.py create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX1.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_16x16x64x4_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_16x16x8x16_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_1x16x8x256_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_2x16x40.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_46x16x32x3_wiki_books_fp32.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_46x16x48x2_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_46x16x4x12_wiki_books_phase2_fp32.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_46x16x8x6_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_48x16x64x2_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_48x16x8x8_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_64x16x32x2_fp32_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_64x16x4x8_wiki_books_phase2_fp32.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_64x16x64_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x4_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x8_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_92x16x48_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_92x16x64_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x3_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x4_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_96x16x40_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_96x16x64_wiki_books.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x3_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x4_wiki_books_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/config_DGX2_phase2.sh create mode 100644 PyTorch/LanguageModeling/BERT/data/BooksDownloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/BookscorpusTextFormatting.py create mode 100644 PyTorch/LanguageModeling/BERT/data/Downloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/GooglePretrainedWeightDownloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/MRPCDownloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/NVIDIAPretrainedWeightDownloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/SquadDownloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/TextSharding.py create mode 100644 PyTorch/LanguageModeling/BERT/data/WikiDownloader.py create mode 100644 PyTorch/LanguageModeling/BERT/data/WikicorpusTextFormatting.py create mode 100644 PyTorch/LanguageModeling/BERT/data/__init__.py create mode 100644 PyTorch/LanguageModeling/BERT/data/bertPrep.py delete mode 100644 PyTorch/LanguageModeling/BERT/data/bookcorpus/clean_and_merge_text.py delete mode 100755 
PyTorch/LanguageModeling/BERT/data/bookcorpus/download_bookcorpus.sh delete mode 100755 PyTorch/LanguageModeling/BERT/data/merge_datasets_after_creation.sh delete mode 100755 PyTorch/LanguageModeling/BERT/data/utils/config.sh delete mode 100644 PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset.py delete mode 100644 PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset_ids.py delete mode 100755 PyTorch/LanguageModeling/BERT/data/utils/preprocessing.sh delete mode 100755 PyTorch/LanguageModeling/BERT/data/utils/preprocessing_xargs_wrapper.sh delete mode 100644 PyTorch/LanguageModeling/BERT/data/utils/sentence_segmentation_nltk.py delete mode 100644 PyTorch/LanguageModeling/BERT/data/utils/shard_text_input_file.py delete mode 100755 PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/download_wikipedia.sh delete mode 100644 PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/remove_tags_and_clean.py delete mode 100644 PyTorch/LanguageModeling/BERT/fused_adam_local.py create mode 100644 PyTorch/LanguageModeling/BERT/run.sub create mode 100755 PyTorch/LanguageModeling/BERT/run_and_time.sh create mode 100644 PyTorch/LanguageModeling/BERT/utils.py delete mode 100644 PyTorch/LanguageModeling/BERT/vocab/download_models.py delete mode 100644 PyTorch/LanguageModeling/BERT/vocab/vocab diff --git a/PyTorch/LanguageModeling/BERT/Dockerfile b/PyTorch/LanguageModeling/BERT/Dockerfile index 2c946098..0130c6b4 100644 --- a/PyTorch/LanguageModeling/BERT/Dockerfile +++ b/PyTorch/LanguageModeling/BERT/Dockerfile @@ -1,27 +1,28 @@ -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:19.06-py3 +ARG FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:19.07-py3 FROM ${FROM_IMAGE_NAME} RUN apt-get update && apt-get install -y pbzip2 pv bzip2 cabextract +ENV BERT_PREP_WORKING_DIR /workspace/bert/data + +WORKDIR /opt +RUN rm -rf /opt/pytorch/apex ; \ + git clone https://github.com/NVIDIA/apex.git pytorch/apex ; \ + cd pytorch/apex ; \ + pip uninstall --yes apex; \ + git checkout 880ab925bce9f817a93988b021e12db5f67f7787; \ + git pull; \ + pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" . #WORKDIR /opt #RUN cd pytorch/apex \ -# && git fetch origin pull/182/head:norm_fix \ -# && git checkout norm_fix \ +# && git fetch origin pull/334/head:multi_tensor_lamb_optimizer \ +# && git checkout multi_tensor_lamb_optimizer \ # && python setup.py develop --cuda_ext --cpp_ext - -WORKDIR /opt -RUN cd pytorch/apex ; \ - pip uninstall apex; \ - pip uninstall apex; \ - git checkout master; \ - git pull; \ - pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" . - WORKDIR /workspace RUN git clone https://github.com/attardi/wikiextractor.git RUN git clone https://github.com/soskek/bookcorpus.git WORKDIR /workspace/bert +RUN pip install tqdm boto3 requests six ipdb h5py html2text nltk progressbar COPY . . 
-RUN pip install tqdm boto3 requests six ipdb h5py html2text nltk progressbar \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/README.md b/PyTorch/LanguageModeling/BERT/README.md index b06399ed..b309fbf8 100644 --- a/PyTorch/LanguageModeling/BERT/README.md +++ b/PyTorch/LanguageModeling/BERT/README.md @@ -17,40 +17,67 @@ This repository provides a script and recipe to train the BERT model to achieve - [Advanced](#advanced) * [Scripts and sample code](#scripts-and-sample-code) * [Parameters](#parameters) + * [Pre-training parameters](#pre-training-parameters) + * [Fine-tuning parameters](#fine-tuning-parameters) * [Command-line options](#command-line-options) * [Getting the data](#getting-the-data) * [Dataset guidelines](#dataset-guidelines) * [Multi-dataset](#multi-dataset) + * [Relocating hdf5 files](#relocating-hdf5-files) + * [Inter sequence-pair mixing](#inter-sequence-pair-mixing) + * [Retaining document-level granularity](#retaining-document-level-granularity) * [Training process](#training-process) + * [Pre-training](#pre-training) + * [Fine-tuning](#fine-tuning) * [Inference process](#inference-process) + * [Pre-training inference](#pre-training-inference) + * [Fine-tuning inference](#fine-tuning-inference) - [Performance](#performance) * [Benchmarking](#benchmarking) * [Training performance benchmark](#training-performance-benchmark) * [Inference performance benchmark](#inference-performance-benchmark) * [Results](#results) * [Training accuracy results](#training-accuracy-results) - * [NVIDIA DGX-1 (8x V100 16G)](#nvidia-dgx-1-8x-v100-16g) + * [Pre-training loss results](#pre-training-loss-results) + * [Fine-tuning accuracy results](#fine-tuning-accuracy-results) * [Training stability test](#training-stability-test) - * [Training performance results](#training-performance-results) - * [Training performance: NVIDIA DGX-1 (8x V100 16G)](#nvidia-dgx-1-8x-v100-16g-1) - * [Training performance: NVIDIA DGX-1 (8x V100 32G)](#nvidia-dgx-1-8x-v100-32g) - * [Training performance: NVIDIA DGX-2 (16x V100 32G)](#nvidia-dgx-2-16x-v100-32g) - * [Inference performance results](#inference-performance-results) - * [Inference performance: NVIDIA DGX-1 (8x V100 16G)](#inference-performance-nvidia-dgx-1-8x-v100-16g) - * [Inference performance: NVIDIA DGX-1 (8x V100 32G)](#inference-performance-nvidia-dgx-1-8x-v100-32g) - * [Inference performance: NVIDIA DGX-2 (16x V100 32G)](#inference-performance-nvidia-dgx-2-16x-v100-32g) + * [Pre-training stability test](#pre-training-stability-test) + * [Fine-tuning stability test](#fine-tuning-stability-test) + * [Training performance results](#training-performance-results) + * [Training performance: NVIDIA DGX-1 (8x V100 16G)](#training-performance-nvidia-dgx-1-8x-v100-16g) + * [Pre-training NVIDIA DGX-1 With 16G](#pre-training-nvidia-dgx-1-with-16g) + * [Fine-tuning NVIDIA DGX-1 With 16G](#fine-tuning-nvidia-dgx-1-with-16g) + * [Training performance: NVIDIA DGX-1 (8x V100 32G)](#training-performance-nvidia-dgx-1-8x-v100-32g) + * [Pre-training NVIDIA DGX-1 With 32G](#pre-training-nvidia-dgx-1-with-32g) + * [Fine-tuning NVIDIA DGX-1 With 32G](#fine-tuning-nvidia-dgx-1-with-32g) + * [Training performance: NVIDIA DGX-2 (16x V100 32G)](#training-performance-nvidia-dgx-2-16x-v100-32g) + * [Pre-training NVIDIA DGX-2 With 32G](#pre-training-nvidia-dgx-2-with-32g) + * [Fine-tuning NVIDIA DGX-2 With 32G](#fine-tuning-nvidia-dgx-2-with-32g) + * [Inference performance results](#inference-performance-results) + * [Inference performance: NVIDIA DGX-1 
(1x V100 16G)](#inference-performance-nvidia-dgx-1-1x-v100-16g) + * [Pre-training inference on NVIDIA DGX-1 with 16G](#pre-training-inference-on-nvidia-dgx-1-with-16g) + * [Fine-tuning inference on NVIDIA DGX-1 with 16G](#fine-tuning-inference-on-nvidia-dgx-1-with-16g) + * [Inference performance: NVIDIA DGX-1 (1x V100 32G)](#inference-performance-nvidia-dgx-1-1x-v100-32g) + * [Pre-training inference on NVIDIA DGX-1 with 32G](#pre-training-inference-on-nvidia-dgx-1-with-32g) + * [Fine-tuning inference on NVIDIA DGX-1 with 32G](#fine-tuning-inference-on-nvidia-dgx-1-with-32g) + * [Inference performance: NVIDIA DGX-2 (1x V100 32G)](#inference-performance-nvidia-dgx-2-1x-v100-32g) + * [Pre-training inference on NVIDIA DGX-2 with 32G](#pre-training-inference-on-nvidia-dgx-2-with-32g) + * [Fine-tuning inference on NVIDIA DGX-2 with 32G](#fine-tuning-inference-on-nvidia-dgx-2-with-32g) - [Release notes](#release-notes) * [Changelog](#changelog) * [Known issues](#known-issues) + ## Model overview -BERT, or Bidirectional Encoder Representations from Transformers, is a new method of pre-training language representations which obtains state-of-the-art results on a wide array of Natural Language Processing (NLP) tasks. This model is based on the [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) paper. NVIDIA's implementation of BERT is an optimized version of the [Hugging Face implementation](https://github.com/huggingface/pytorch-pretrained-BERT), leveraging mixed precision arithmetic and tensor cores on V100 GPUs for faster training times while maintaining target accuracy. + +BERT, or Bidirectional Encoder Representations from Transformers, is a new method of pre-training language representations which obtains state-of-the-art results on a wide array of Natural Language Processing (NLP) tasks. This model is based on the [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) paper. NVIDIA's implementation of BERT is an optimized version of the [Hugging Face implementation](https://github.com/huggingface/pytorch-pretrained-BERT), leveraging mixed precision arithmetic and Tensor Cores on V100 GPUs for faster training times while maintaining target accuracy. The repository also contains scripts to interactively launch data download, training, benchmarking and inference routines in a Docker container for both pre-training and fine-tuning for tasks such as question answering. The major differences between the original implementation of the paper and this version of BERT are as follows: - Scripts to download Wikipedia and BookCorpus datasets - Scripts to preprocess downloaded data or a custom corpus into inputs and targets for pre-training in a modular fashion. -- Fused CUDA kernels for better performance with Adam and LayerNorm +- Fused [LAMB](https://arxiv.org/pdf/1904.00962.pdf) optimizer to support training with larger batches +- Fused CUDA kernels for better LayerNorm performance - Automatic Mixed precision training support Other publicly available implementations of BERT include: @@ -58,8 +85,7 @@ Other publicly available implementations of BERT include: 1. [Google's official implementation](https://github.com/google-research/bert) 2. [codertimo](https://github.com/codertimo/BERT-pytorch) - -This model trains with mixed precision tensor cores on Volta and provides a push-button solution to pretraining on a corpus of choice.
As a result, researchers can get results 4x faster than training without tensor cores. This model is tested against each NGC monthly container release to ensure consistent accuracy and performance over time. +This model trains with mixed precision Tensor Cores on Volta and provides a push-button solution to pretraining on a corpus of choice. As a result, researchers can get results 4x faster than training without Tensor Cores. This model is tested against each NGC monthly container release to ensure consistent accuracy and performance over time. ### Model architecture @@ -90,13 +116,19 @@ The following features are supported by this model. |:---------:|:----------:| |APEX AMP|Yes| |APEX DDP|Yes| +|LAMB|Yes| #### Features -[APEX](https://github.com/NVIDIA/apex) is a Pytorch extension with NVIDIA-maintained utilities to streamline mixed precision and distributed training. [DDP](https://nvidia.github.io/apex/parallel.html) stands for DistributedDataParallel and is used for multi gpu training, where as [AMP](https://nvidia.github.io/apex/amp.html) is an abbreviation used for mixed precision training. + +[APEX](https://github.com/NVIDIA/apex) is a Pytorch extension with NVIDIA-maintained utilities to streamline mixed precision and distributed training. + +[DDP](https://nvidia.github.io/apex/parallel.html) stands for DistributedDataParallel and is used for multi-GPU training, whereas [AMP](https://nvidia.github.io/apex/amp.html) is an abbreviation for automatic mixed precision training. + +[LAMB](https://arxiv.org/pdf/1904.00962.pdf) stands for Layerwise Adaptive Moments Based optimizer; it is a large batch optimization technique that helps accelerate training of deep neural networks using large minibatches. It allows using a global batch size of 65536 and 32768 on sequence lengths 128 and 512 respectively, compared to a batch size of 256 for Adam. The optimized implementation accumulates gradients over 1024 batches in phase 1 and over 4096 batches in phase 2 before updating weights once. This results in a 15% training speedup. On multi-node systems, LAMB allows scaling up to 1024 GPUs, resulting in training speedups of up to 72x in comparison to [Adam](https://arxiv.org/pdf/1412.6980.pdf). Adam has limitations on the learning rate that can be used since it is applied globally on all parameters, whereas LAMB follows a layerwise learning rate strategy.
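The core of that layerwise strategy can be sketched in a few lines of PyTorch. The following is a simplified illustration of a single LAMB parameter update, not the repository's fused `apex` implementation (which adds bias correction and multi-tensor CUDA kernels); all names here are illustrative:

```python
import torch

def lamb_update(param, grad, m, v, lr, beta1=0.9, beta2=0.999,
                eps=1e-6, weight_decay=0.01):
    """One simplified LAMB update for a single parameter tensor."""
    # Adam-style running moments (bias correction omitted for brevity).
    m.mul_(beta1).add_(grad, alpha=1 - beta1)
    v.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)
    update = m / (v.sqrt() + eps) + weight_decay * param
    # Layerwise trust ratio: weight norm relative to update norm.
    w_norm, u_norm = param.norm(), update.norm()
    trust_ratio = (w_norm / u_norm).item() if w_norm > 0 and u_norm > 0 else 1.0
    # Each layer steps with its own effective learning rate, which is
    # what keeps very large global batches stable.
    param.add_(update, alpha=-lr * trust_ratio)
```

Because layers with small weight norms automatically take proportionally small steps, a single global learning rate can be set far higher than Adam would tolerate.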
### Mixed precision training -Mixed precision is the combined use of different numerical precisions in a computational method. [Mixed precision](https://arxiv.org/abs/1710.03740) training offers significant computational speedup by performing operations in half-precision format, while storing minimal information in single-precision to retain as much information as possible in critical parts of the network. Since the introduction of [tensor cores](https://developer.nvidia.com/tensor-cores) in the Volta and Turing architecture, significant training speedups are experienced by switching to mixed precision -- up to 3x overall speedup on the most arithmetically intense model architectures. Using mixed precision training requires two steps: +Mixed precision is the combined use of different numerical precisions in a computational method. [Mixed precision](https://arxiv.org/abs/1710.03740) training offers significant computational speedup by performing operations in half-precision format, while storing minimal information in single-precision to retain as much information as possible in critical parts of the network. Since the introduction of [Tensor Cores](https://developer.nvidia.com/tensor-cores) in the Volta and Turing architecture, significant training speedups are experienced by switching to mixed precision -- up to 3x overall speedup on the most arithmetically intense model architectures. Using mixed precision training requires two steps: 1. Porting the model to use the FP16 data type where appropriate. 2. Adding loss scaling to preserve small gradient values. @@ -105,29 +137,27 @@ For information about: - How to train using mixed precision, see the [Mixed Precision Training](https://arxiv.org/abs/1710.03740) paper and [Training With Mixed Precision](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html) documentation. - Techniques used for mixed precision training, see the [Mixed-Precision Training of Deep Neural Networks](https://devblogs.nvidia.com/mixed-precision-training-deep-neural-networks/) blog. - -APEX tools for mixed precision training, see the [NVIDIA Apex: Tools for Easy Mixed-Precision Training in PyTorch](https://devblogs.nvidia.com/apex-pytorch-easy-mixed-precision-training/). - +- APEX tools for mixed precision training, see the [NVIDIA Apex: Tools for Easy Mixed-Precision Training in PyTorch](https://devblogs.nvidia.com/apex-pytorch-easy-mixed-precision-training/). + #### Enabling mixed precision In this repository, mixed precision training is enabled by NVIDIA’s APEX library. The APEX library has an automatic mixed precision module that allows mixed precision to be enabled with minimal code changes. -Automatic mixed precision can be enabled with the following code changes: +Automatic mixed precision can be enabled with the following code changes: ``` from apex import amp if fp16: - # Wrap optimizer and model - model, optimizer = amp.initialize(model, optimizer, opt_level=<opt_level>, loss_scale="dynamic") + # Wrap optimizer and model + model, optimizer = amp.initialize(model, optimizer, opt_level=<opt_level>, loss_scale="dynamic") if fp16: - with amp.scale_loss(loss, optimizer) as scaled_loss: - scaled_loss.backward() + with amp.scale_loss(loss, optimizer) as scaled_loss: + scaled_loss.backward() ``` - -Where `<opt_level>` is the optimization level. In the pretraining, “O2” is set as the optimization level. Mixed precision training can be turned on by passing in the argument fp16 to the pre-training and fine-tuning Python scripts. Shell scripts all have a positional argument available to enable mixed precision training. - +Where `<opt_level>` is the optimization level. In the pretraining, “O2” is set as the optimization level. Mixed precision training can be turned on by passing the `fp16` argument to the pre-training and fine-tuning Python scripts. Shell scripts all have a positional argument available to enable mixed precision training. + ## Setup The following section lists the requirements in order to start training the BERT model. @@ -137,11 +167,10 @@ The following section lists the requirements in order to start training the BERT This repository contains a Dockerfile which extends the PyTorch NGC container and encapsulates some dependencies.
Aside from these dependencies, ensure you have the following components: - [NVIDIA Docker](https://github.com/NVIDIA/nvidia-docker) -- [PyTorch 19.06-py3 NGC container or later](https://ngc.nvidia.com/registry/nvidia-pytorch) -- [NVIDIA Volta based GPU](https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/) +- [PyTorch 19.07-py3 NGC container or later](https://ngc.nvidia.com/registry/nvidia-pytorch) +- [NVIDIA Volta](https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/) or [Turing](https://www.nvidia.com/en-us/geforce/turing/) based GPU For more information about how to get started with NGC containers, see the following sections from the NVIDIA GPU Cloud Documentation and the Deep Learning Documentation: - - [Getting Started Using NVIDIA GPU Cloud](https://docs.nvidia.com/ngc/ngc-getting-started-guide/index.html) - [Accessing And Pulling From The NGC Container Registry](https://docs.nvidia.com/deeplearning/dgx/user-guide/index.html#accessing_registry) - [Running PyTorch](https://docs.nvidia.com/deeplearning/dgx/pytorch-release-notes/running.html#running) @@ -150,8 +179,8 @@ For those unable to use the PyTorch NGC container, to set up the required enviro ## Quick Start Guide -To train your model using mixed precision with tensor cores or using FP32, perform the following steps using the default parameters of the BERT model. For the specifics concerning training and inference, see [Details](https://docs.google.com/document/d/1O5f8pzO89rFhFgysrXAN6Y3di5oyhDlf_K6Q63VrMWM/edit#heading=h.fdn4uvtc4wbi). - +To train your model using mixed precision with Tensor Cores or using FP32, perform the following steps using the default parameters of the BERT model. The default parameters for pretraining have been set to run on 8 x V100 32G cards. For the specifics concerning training and inference, see [Advanced](#advanced). + 1. Clone the repository. 2. Download NVIDIA pretrained checkpoint. -If you wish to use a pretrained checkpoint, visit [NGC](https://ngc.nvidia.com/catalog/models) and browse the available models. This downloaded checkpoint is used to fine-tune on SQuAD. +If you want to use a pretrained checkpoint, visit [NGC](https://ngc.nvidia.com/catalog/models) and browse the available models. This downloaded checkpoint is used to fine-tune on SQuAD. -3. Build the BERT 19.06 NGC container. +3. Build the BERT 19.07 NGC container. `bash scripts/docker/build.sh` 4. Start an interactive session in the NGC container to run training/inference. -`bash scripts/docker/launch.sh <DATA_DIR> <VOCAB_DIR> <CHECKPOINT_DIR>` +`bash scripts/docker/launch.sh <DATA_DIR> <VOCAB_DIR> <CHECKPOINT_DIR> <RESULTS_DIR>` `<DATA_DIR>` - Path to `data` folder in the cloned repository. This directory contains scripts needed to download datasets and where the data will be downloaded. `<VOCAB_DIR>` - Path to `vocab` folder in the cloned repository. This is the vocabulary with which BERT checkpoint is pretrained. -`<CHECKPOINT_DIR>` - Path to folder contaning the downloaded pretrained checkpoint from step 2 for fine-tuning. +`<CHECKPOINT_DIR>` - Path to folder containing the downloaded pretrained checkpoint from step 2 for fine-tuning. -The above paths present on the local machine get mouted to predefined locations in the container. `<RESULTS_DIR>` - Path to folder where logs and checkpoints will be saved. -`data` and `vocab` are a part of .dockerignore in order to provide the user the ability to mount datasets of choice and not necessarily the ones downloaded by the script below. In this case, `<DATA_DIR>` points to users corpus.
Refer to section [Getting the data](#getting-the-data) for more details on how to process a custom corpus as required for BERT pretraining. +The above paths present on the local machine get mounted to predefined locations in the container. + +`data` and `vocab` are a part of `.dockerignore` in order to provide the user the ability to mount datasets of choice and not necessarily the ones downloaded by the script below. In this case, `<DATA_DIR>` points to the user's corpus. Refer to the [Getting the data](#getting-the-data) section for more details on how to process a custom corpus as required for BERT pretraining. 5. Download and preprocess the dataset. This repository provides scripts to download, verify and extract the following datasets: - SQuAD v1.1 (fine-tuning for question answering) - Wikipedia (pre-training) - BookCorpus (pre-training) -To download, verify, and extract the datasets, run: -`bash scripts/data_download.sh` +To download, verify, extract the datasets, and create the shards in hdf5 format, run: +`/workspace/bert/data/create_datasets_from_start.sh` - Once data download is complete, the following folders should exist in `<DATA_DIR>` specified in step 4: - -- SQuAD v1.1 - data/squad/v1.1 -- SWAG - data/swag -- MRPC - data/glue/MRPC -- Vocab files - vocab/uncased_L-24_H-1024_A-16 -- Wikipedia - data/wikipedia_corpus/hdf5_shards -- BookCorpus - data/bookcorpus/hdf5_shards -- Mixed Wikipedia/Bookcorpus - data/merged_wiki+books/hdf5_shards -6. Start Pre training. +6. Start pre-training. BERT is designed to pre-train deep bidirectional representations for language representations. The following scripts are to replicate pretraining on Wikipedia+Book Corpus from this [paper](https://arxiv.org/pdf/1810.04805.pdf). These scripts are general and can be used for pre-training language representations on any corpus of choice. From within the container, you can use the following script to run pre-training. `bash scripts/run_pretraining.sh 14 0.875e-4 fp16 16 0.01 1142857 2000 false true` More details can be found in Details/Training Process - + 7. Start fine-tuning with the SQuAD dataset. The above pretrained BERT representations can be fine-tuned with just one additional output layer for a state-of-the-art question answering system. Running the following script launches fine-tuning for question answering with the SQuAD dataset. Default arguments are listed below in order: -- Initial checkpoint - "/workspace/checkpoints/bert_uncased.pt" -- Number of training Epochs - 2 -- Batch size - 3 -- Learning rate - 3e-5 -- Precision (either fp16 or fp32) - fp16 -- Number of GPUs - 8 -- Seed - 1 -- SQuAD directory - /workspace/bert/data/v1.1 -- Vocabulary file (token to ID mapping) - /workspace/bert/vocab/vocab -- Output directory for result - /results/SQuAD -- Mode (“train”, “eval”, “train eval”, "predict") - train -- Config file for the bert model (It should be the same as the pretrained model) - /workspace/bert/bert_config.json +- Initial checkpoint - The default is `/workspace/checkpoints/bert_uncased.pt`. +- Number of training epochs - The default is `2`. +- Batch size - The default is `3`. +- Learning rate - The default is `3e-5`. +- Precision (either `fp16` or `fp32`) - The default is `fp16`. +- Number of GPUs - The default is `8`. +- Seed - The default is `1`. +- SQuAD directory - The default is `/workspace/bert/data/v1.1`.
+- Vocabulary file (token to ID mapping) - The default is `/workspace/bert/vocab/vocab`. +- Output directory for result - The default is `/results/SQuAD`. +- Mode ("train", "eval", "train eval", "predict") - The default is `train`. +- Config file for the bert model (It should be the same as the pretrained model) - The default is `/workspace/bert/bert_config.json`. -The script will save the final checkpoint to the /results/SQuAD/pytorch_model.bin file. +The script will save the final checkpoint to the `/results/SQuAD/pytorch_model.bin` file. 9. Start validation/evaluation. -Validation can be performed with the same script as above, setting Mode to "prediction". +Validation can be performed with the same script as above, setting `Mode` to `prediction`. 10. Start inference/predictions. -Inference can be performed with the same script as above, setting Mode to "eval". Inference predictions get saved to `/predictions.json` +Inference can be performed with the same script as above, setting `Mode` to `eval`. Inference predictions get saved to `<output_directory>/predictions.json`. ## Advanced -The following sections provide greater details of the dataset, running training and inference, and the training results. +The following sections provide greater details of the dataset, running training and inference, and the training results. ### Scripts and sample code Descriptions of the key scripts and folders are provided below. - `data/` - Contains scripts for downloading and preparing individual datasets, and will contain downloaded and processed datasets. - `scripts/` - Contains shell scripts to launch data download, pre-training, and fine-tuning. - - `data_download.sh` - Launches download and processing of required datasets. - - `run_squad.sh` - Interface for launching question answering fine-tuning with `run_squad.py`. - - `run_pretraining.sh` - Interface for launching BERT pre-training with `run_pretraining.py`. -- `create_pretraining_data.py` - Creates .hdf5 files from shared text files in the final step of dataset creation. +- `data_download.sh` - Launches download and processing of required datasets. +- `run_squad.sh` - Interface for launching question answering fine-tuning with `run_squad.py`. +- `run_pretraining.sh` - Interface for launching BERT pre-training with `run_pretraining.py`. +- `create_pretraining_data.py` - Creates `.hdf5` files from sharded text files in the final step of dataset creation. - `modeling.py` - Implements the BERT pre-training and fine-tuning model architectures with PyTorch. -`optimization.py` - Implements the ADAM optimizer with PyTorch. +- `optimization.py` - Implements the LAMB optimizer with PyTorch. - `run_squad.py` - Implements fine-tuning training and evaluation for question answering on the [SQuAD](https://rajpurkar.github.io/SQuAD-explorer/) dataset. - `run_pretraining.py` - Implements BERT pre-training. - `run_pretraining_inference.py` - Implements evaluation of a BERT pre-trained model. ### Parameters -#### run_pretraining.py script parameters +#### Pre-training parameters The complete list of the available parameters for the `run_pretraining.py` script is: ``` - --input_dir INPUT_DIR - The input data directory. - Should contain .hdf5 files for the task. + --input_dir INPUT_DIR - The input data directory. + Should contain .hdf5 files for the task. - --config_file CONFIG_FILE - Path to a json file describing the BERT model - configuration.
This file configures the model - architecture, such as the number of transformer - blocks, number of attention heads, etc. + --config_file CONFIG_FILE - Path to a json file describing the BERT model + configuration. This file configures the model + architecture, such as the number of transformer + blocks, number of attention heads, etc. - --bert_model BERT_MODEL - Specifies the type of BERT model to use; - should be one of the following: - bert-base-uncased - bert-large-uncased - bert-base-cased - bert-base-multilingual - bert-base-chinese + --bert_model BERT_MODEL - Specifies the type of BERT model to use; + should be one of the following: + bert-base-uncased + bert-large-uncased + bert-base-cased + bert-base-multilingual + bert-base-chinese - --output_dir OUTPUT_DIR - Path to the output directory where the model - checkpoints will be written. + --output_dir OUTPUT_DIR - Path to the output directory where the model + checkpoints will be written. - --max_seq_length MAX_SEQ_LENGTH - - The maximum total input sequence length after - WordPiece tokenization. Sequences longer than - this will be truncated, and sequences shorter - than this will be padded. . + --max_seq_length MAX_SEQ_LENGTH + - The maximum total input sequence length after + WordPiece tokenization. Sequences longer than + this will be truncated, and sequences shorter + than this will be padded. - --max_predictions_per_seq MAX_PREDICTIONS_PER_SEQ - - The maximum total of masked tokens per input - sequence for Masked LM. + --max_predictions_per_seq MAX_PREDICTIONS_PER_SEQ + - The maximum total of masked tokens per input + sequence for Masked LM. - --train_batch_size TRAIN_BATCH_SIZE - - Batch size per GPU for training. + --train_batch_size TRAIN_BATCH_SIZE + - Batch size per GPU for training. - --learning_rate LEARNING_RATE - - The initial learning rate for ADAM optimizer. + --learning_rate LEARNING_RATE + - The initial learning rate for LAMB optimizer. - --max_steps MAX_STEPS - Total number of training steps to perform. + --max_steps MAX_STEPS - Total number of training steps to perform. - --warmup_proportion WARMUP_PROPORTION - - Proportion of training to perform linear learning - rate warmup for. For example, 0.1 = 10% of training. + --warmup_proportion WARMUP_PROPORTION + - Proportion of training to perform linear learning + rate warmup for. For example, 0.1 = 10% of training. - --seed SEED - Sets the seed to use for random number generation. + --seed SEED - Sets the seed to use for random number generation. - --gradient_accumulation_steps GRADIENT_ACCUMULATION_STEPS - - Number of update steps to accumulate before - performing a backward/update pass. + --gradient_accumulation_steps GRADIENT_ACCUMULATION_STEPS + - Number of update steps to accumulate before + performing a backward/update pass. - --fp16 - If set, will perform computations using - automatic mixed precision. + --fp16 - If set, will perform computations using + automatic mixed precision. - --loss_scale LOSS_SCALE - Sets the loss scaling value to use when - mixed precision is used. The default value (0) - tells the script to use dynamic loss scaling - instead of fixed loss scaling. + --loss_scale LOSS_SCALE - Sets the loss scaling value to use when + mixed precision is used. The default value (0) + tells the script to use dynamic loss scaling + instead of fixed loss scaling. - --log_freq LOG_FREQ - If set, the script will output the training - loss every LOG_FREQ steps. 
- - --resume_from_checkpoint - If set, training will resume from a checkpoint - that currently exists in OUTPUT_DIR. + --log_freq LOG_FREQ - If set, the script will output the training + loss every LOG_FREQ steps. + + --resume_from_checkpoint - If set, training will resume from a checkpoint + that currently exists in OUTPUT_DIR. - --num_steps_per_checkpoint NUM_STEPS_PER_CHECKPOINT - - Number of update steps until a model checkpoint - is saved to disk.` + --num_steps_per_checkpoint NUM_STEPS_PER_CHECKPOINT + - Number of update steps until a model checkpoint + is saved to disk.` ``` - -#### run_squad.py script parameters + +#### Fine-tuning parameters -The run_squad.py script contains many of the same arguments as `run_pretraining.py`. +The `run_squad.py` script contains many of the same arguments as `run_pretraining.py`. The main script-specific parameters are: ``` - --bert_model BERT_MODEL - Specifies the type of BERT model to use; - should be one of the following: - bert-base-uncased - bert-large-uncased - bert-base-cased - bert-base-multilingual - bert-base-chinese + --bert_model BERT_MODEL - Specifies the type of BERT model to use; + should be one of the following: + bert-base-uncased + bert-large-uncased + bert-base-cased + bert-base-multilingual + bert-base-chinese - --train_file TRAIN_FILE - Path to the SQuAD json for training. - For example, train-v1.1.json. + --train_file TRAIN_FILE - Path to the SQuAD json for training. + For example, train-v1.1.json. - --predict_file PREDICT_FILE - Path to the SQuAD json for predictions. - For example, dev-v1.1.json or test-v1.1.json. + --predict_file PREDICT_FILE - Path to the SQuAD json for predictions. + For example, dev-v1.1.json or test-v1.1.json. - --max_seq_length MAX_SEQ_LENGTH - - The maximum total input sequence length - after WordPiece tokenization. - Sequences longer than this will be truncated, - and sequences shorter than this will be padded. + --max_seq_length MAX_SEQ_LENGTH + - The maximum total input sequence length + after WordPiece tokenization. + Sequences longer than this will be truncated, + and sequences shorter than this will be padded. - --doc_stride DOC_STRIDE - When splitting up a long document into chunks - this parameters sets how much stride to take - between chunks of tokens. + --doc_stride DOC_STRIDE - When splitting up a long document into chunks + this parameter sets how much stride to take + between chunks of tokens. - --max_query_length MAX_QUERY_LENGTH - - The maximum number of tokens for the question. - Questions longer than - will be truncated to the value specified. + --max_query_length MAX_QUERY_LENGTH + - The maximum number of tokens for the question. + Questions longer than this + will be truncated to the value specified. - --n_best_size N_BEST_SIZE - The total number of n-best predictions to - generate in the nbest_predictions.json - output file. + --n_best_size N_BEST_SIZE - The total number of n-best predictions to + generate in the nbest_predictions.json + output file. - --max_answer_length MAX_ANSWER_LENGTH - - The maximum length of an answer that can be - generated. This is needed because the start and - end predictions are not conditioned on one another. + --max_answer_length MAX_ANSWER_LENGTH + - The maximum length of an answer that can be + generated. This is needed because the start and + end predictions are not conditioned on one another. - --verbose_logging - If true, all the warnings related to data - processing will be printed. A number of warnings - are expected for a normal SQuAD evaluation.
+ --verbose_logging - If true, all the warnings related to data + processing will be printed. A number of warnings + are expected for a normal SQuAD evaluation. - --do_lower_case - Whether to lower case the input text. Set to - true for uncased models and false for cased models. + --do_lower_case - Whether to lower case the input text. Set to + true for uncased models and false for cased models. - --version_2_with_negative - If true, the SQuAD examples contain questions - that do not have an answer. + --version_2_with_negative - If true, the SQuAD examples contain questions + that do not have an answer. - --null_score_diff_threshold NULL_SCORE_DIFF_THRES HOLD - - A null answer will be predicted if null_score if - best_non_null is greater than NULL_SCORE_DIFF_THRESHOLD. + --null_score_diff_threshold NULL_SCORE_DIFF_THRESHOLD + - A null answer will be predicted if null_score - + best_non_null is greater than NULL_SCORE_DIFF_THRESHOLD. ```
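The `--doc_stride` chunking listed above can be made concrete with a short sketch. This is an illustration only (a hypothetical helper, not code from `run_squad.py`, which additionally tracks token-to-original-text mappings):

```python
def split_into_chunks(doc_tokens, max_tokens, doc_stride):
    """Split a tokenized document into overlapping chunks.

    Consecutive chunks start doc_stride tokens apart, so they overlap
    by (max_tokens - doc_stride) tokens and no answer span is lost at
    a chunk boundary.
    """
    chunks, start = [], 0
    while start < len(doc_tokens):
        chunks.append(doc_tokens[start:start + max_tokens])
        if start + max_tokens >= len(doc_tokens):
            break
        start += doc_stride
    return chunks

# A 500-token document with max_tokens=384 and doc_stride=128 yields
# two chunks covering tokens [0:384] and [128:500].
chunks = split_into_chunks(list(range(500)), max_tokens=384, doc_stride=128)
assert [len(c) for c in chunks] == [384, 372]
```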
### Command-line options @@ -412,33 +433,30 @@ Detailed descriptions of command line options can be found in the Parameters sec For pre-training BERT, we use the concatenation of Wikipedia (2500M words) as well as Book Corpus (800M words). For Wikipedia, we extract only the text passages and ignore headers, lists, and tables. BERT requires that datasets are structured as a document level corpus rather than a shuffled sentence level corpus because it is critical to extract long contiguous sentences. -The preparation of an individual pre-training dataset is described in the run_preprocessing.sh script found in the data/bookcorpus and data/wikipedia_corpus folders. The component steps to prepare the datasets are as follows: +The preparation of the pre-training dataset is described in the `bertPrep.py` script found in the `data/` folder. The component steps in the automated scripts to prepare the datasets are as follows: -1. Data download and extract - the dataset is downloaded and extracted -2. Clean and format - document tags, etc. are removed from the dataset. The end result of this step is a {dataset_name}.txt file that contains the entire corpus. Each line in the text file contains an entire document from the corpus. -3. Sentence segmentation - the corpus text file is processed into separate sentences. The result of this step is a {dataset_name}.segmented.nltk.txt file in a final_text_file_single directory that contains the entire corpus, with each sentence having its own line. Documents are separated by a new line between documents. -4. Sharding - the sentence segmented corpus file is split into a number of smaller text documents. The sharding is configured so that a document will not be split between two shards. -5. HDF5 file creation - each text file shard is processed by the `create_pretraining_data.py` script to produce a corresponding HDF5 file. The script generates input data and labels for masked language modeling and sentence prediction tasks for the input text shard. +1. Data download and extract - the dataset is downloaded and extracted. +2. Clean and format - document tags, etc. are removed from the dataset. +3. Sentence segmentation - the corpus text file is processed into separate sentences. +4. Sharding - the sentence segmented corpus file is split into a number of uniformly distributed smaller text documents. +5. hdf5 file creation - each text file shard is processed by the `create_pretraining_data.py` script to produce a corresponding hdf5 file. The script generates input data and labels for masked language modeling and sentence prediction tasks for the input text shard. -The tools used for preparing the Bookcorpus and Wikipedia datasets can be applied to prepare an arbitrary corpus. The create_datasets_from_start.sh script in the data/ directory applies sentence segmentation, sharding, and hdf5 file creation given an arbitrary text file containing a document-separated text corpus. +The tools used for preparing the Bookcorpus and Wikipedia datasets can be applied to prepare an arbitrary corpus. The `create_datasets_from_start.sh` script in the `data/` directory applies sentence segmentation, sharding, and hdf5 file creation given an arbitrary text file containing a document-separated text corpus. -Therefore, to prepare a custom dataset, first replicate steps 1 and 2 above. Then, simply call: -`create_datasets_from_start.sh ` -The script will then create a / directory in the data/ folder and apply steps 3 through 5 on the document corpus. The end result of the script will be the /hdf5_shards directory, which contains the hdf5 files for the custom text corpus. -For fine-tuning a pre-trained BERT model for specific tasks, by default this repository prepares the following datasets: -- [SQuAD](https://rajpurkar.github.io/SQuAD-explorer/): for question answering -- [SWAG](https://rowanzellers.com/swag/): for multiple choice -- [GLUE](https://gluebenchmark.com/): a benchmark that uses multiple datasets to measure sentence classification + +For fine-tuning a pre-trained BERT model for specific tasks, by default this repository prepares the following dataset: + +- [SQuAD](https://rajpurkar.github.io/SQuAD-explorer/): for question answering #### Dataset guidelines The procedure to prepare a text corpus for pre-training is described in the above section. This section will provide additional insight into how exactly raw text is processed so that it is ready for pre-training. -First, raw text is tokenized using [WordPiece tokenization](https://arxiv.org/pdf/1609.08144.pdf). A [CLS] token is inserted at the start of every sequence, and the two sentences in the sequence are separated with a [SEP] token. +First, raw text is tokenized using [WordPiece tokenization](https://arxiv.org/pdf/1609.08144.pdf). A [CLS] token is inserted at the start of every sequence, and the two sentences in the sequence are separated by a [SEP] token. Note: BERT pre-training looks at pairs of sentences at a time. A sentence embedding token [A] is added to the first sentence and token [B] to the next. @@ -450,31 +468,7 @@ The `create_pretraining_data.py` script takes in raw text and creates training i
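To give a concrete picture of what a training instance looks like, here is a simplified sketch of how a sentence pair becomes a masked training example. It assumes the 15% masking rate and the 80/10/10 mask/random/keep split from the BERT paper; the real `create_pretraining_data.py` additionally samples next-sentence pairs, enforces sequence-length budgets, and serializes the result to hdf5:

```python
import random

def build_instance(tokens_a, tokens_b, vocab, masked_lm_prob=0.15):
    """Assemble one pre-training instance from two tokenized sentences."""
    # [CLS] sentence A [SEP] sentence B [SEP]
    tokens = ["[CLS]"] + tokens_a + ["[SEP]"] + tokens_b + ["[SEP]"]
    # Segment ids distinguish sentence A (0) from sentence B (1).
    segment_ids = [0] * (len(tokens_a) + 2) + [1] * (len(tokens_b) + 1)

    masked_positions, masked_labels = [], []
    for i, tok in enumerate(tokens):
        if tok in ("[CLS]", "[SEP]") or random.random() > masked_lm_prob:
            continue
        masked_positions.append(i)
        masked_labels.append(tok)
        r = random.random()
        if r < 0.8:                    # 80%: replace with [MASK]
            tokens[i] = "[MASK]"
        elif r < 0.9:                  # 10%: replace with a random token
            tokens[i] = random.choice(vocab)
        # remaining 10%: keep the original token
    return tokens, segment_ids, masked_positions, masked_labels
```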
#### Multi-dataset -This repository provides tools to combine multiple datasets into a single dataset for pre-training on a diverse text corpus. There are three options to accomplish this, such as relocating hdf5 files, inter sequence-pair mixing and cleaning each source corpora. - -##### Relocating hdf5 files - -If hdf5 files have been created for each component dataset, then one can simply create a combined dataset by moving all hdf5 files to a single directory. However, in the training process only one hdf5 file is consumed at a time, therefore, the training instances of any given training batch will all belong to the same source dataset. - -##### Inter sequence-pair mixing - -Another option for merging datasets once hdf5 files have been created is inter sequence-pair mixing. This is the method used to merge the Wikipedia and Bookcorpus datasets for our pre-training convergence experiments. The merge_datasets_after_creation.sh tool in the data/ directory is a tool to merge data from multiple source corpora this way. To perform this mixing from the data/ directory, simply call: - -`bash merge_datasets_after_creation.sh ` - -For example, to merge the Bookcorpus and Wikipedia corpora provided with this repository and create 1024 new shards containing the mixed training instances, first make sure that data/bookcorpus/hdf5_shards/ and data/wikipedia_corpus/hdf5_shards/ exist and contain hdf5 files, then, from the data directory run: - -`bash merge_datasets_after_creation.sh inter_instance_merged_wiki+books bookcorpus/hdf5_shards/,wikipedia_corpus/hdf5_shards/ 1024` - -##### Retain document-level granularity - -It is also possible to merge datasets while retaining document level granularity. To accomplish this, each source corpora must have been cleaned and merged so that each corpus has its own corresponding text file with each document fitting on one line. Examples of this kind of file would be bookcorpus.txt and wikipedia.txt. Then, to create a merged dataset run: - -`bash create_datasets_from_start.sh ...` - -To create a mixture of Wikipedia and Bookcorpus using this method, first ensure that the wikipedia_corpus/wikipedia_corpus.txt and bookcorpus/bookcorpus.txt files exist. Then from the data directory run: - -`bash create_datasets_from_start.sh merged_wiki+books wikipedia/wikipedia_corpus.txt bookcorpus/bookcorpus.txt` +This repository provides functionality to combine multiple datasets into a single dataset for pre-training on a diverse text corpus at the shard level in `/workspace/bert/data/create_datasets_from_start.sh`. ### Training process The training process consists of two steps: pre-training and fine-tuning. @@ -484,41 +478,58 @@ Pre-training is performed using the `run_pretraining.py` script along with parameters defined in `scripts/run_pretraining.sh`. -The `run_pretraining.sh` script runs a job on a single node that trains the BERT-large model from scratch using the Wikipedia and BookCorpus datasets as training data. By default, the training script: +The `run_pretraining.sh` script runs a job on a single node that trains the BERT-large model from scratch using the Wikipedia and BookCorpus datasets as training data, using the LAMB optimizer. By default, the training script runs two phases of training: -- Runs on 8 GPUs with training batch size of 14 per GPU -- Uses a learning rate of 0.4375e-4 +Phase 1: (Maximum sequence length of 128) +- Runs on 8 GPUs with training batch size of 64 per GPU +- Uses a learning rate of 6e-3 - Has FP16 precision enabled -- Runs for 2285714 steps, where the first 1% are warm-up steps +- Runs for 7038 steps, where the first 28.43% (2000) are warm-up steps -- Saves a checkpoint every 2000 iterations (keeps only the latest 3 checkpoints) and at the end of training. All checkpoints, and training logs are saved to the /results directory (in the container which can be mounted to a local directory). +- Saves a checkpoint every 200 iterations (keeps only the latest 3 checkpoints) and at the end of training. All checkpoints and training logs are saved to the `/results` directory (in the container which can be mounted to a local directory). - Creates a log file containing all the output -These parameters will train Wikipedia and BooksCorpus to reasonable accuracy on a DGX-1 with 32GB V100 cards.
If you want to match Google’s best results from the BERT paper, you should either train for the above number of 2,285,714 steps on a DGX-1 or train on 16 GPUs on a DGX-2. The DGX-2 having 16 GPUs will be able to fit a batch size twice as large as a DGX-1 (224 vs 112), hence the DGX-2 can finish in half as many steps. For example: +Phase 2: (Maximum sequence length of 512) +- Runs on 8 GPUs with training batch size of 8 per GPU +- Uses a learning rate of 4e-3 +- Has FP16 precision enabled +- Runs for 1563 steps, where the first 12.8% are warm-up steps +- Saves a checkpoint every 200 iterations (keeps only the latest 3 checkpoints) and at the end of training. All checkpoints and training logs are saved to the `/results` directory (in the container which can be mounted to a local directory). +- Creates a log file containing all the output -`bash run_pretraining.sh <training_batch_size> <learning_rate> <precision> <num_gpus> <warmup_proportion> <training_steps> <save_checkpoint_steps> <resume_training> <create_logfile> <accumulate_gradients> <gradient_accumulation_steps> <seed>` +These parameters will train on Wikipedia and BooksCorpus to SoTA accuracy on a DGX-1 with 32GB V100 cards. + +`bash run_pretraining.sh <training_batch_size> <learning_rate> <precision> <num_gpus> <warmup_proportion> <training_steps> <save_checkpoint_steps> <resume_training> <create_logfile> <accumulate_gradients> <gradient_accumulation_steps> <seed> <allreduce_post_accumulation> <allreduce_post_accumulation_fp16> <accumulate_into_fp16> <training_batch_size_phase2> <learning_rate_phase2> <warmup_proportion_phase2> <training_steps_phase2> <gradient_accumulation_steps_phase2>` Where: -- <training_batch_size> is per-GPU batch size used for training. Larger batch sizes run more efficiently, but require more memory. -- <learning_rate> is the base learning rate for training -- <precision> is the type of math in your model, can be either FP32 or FP16. The options mean: - - FP32: 32-bit IEEE single precision floats. - - FP16: Mixed precision 16 and 32 bit floats. -- <num_gpus> is the number of GPUs to use for training. Must be equal to or smaller than the number of GPUs attached to your node. -- <warmup_proportion> is the percentage of training steps used for warm-up at the start of training. -- <training_steps> is the total number of training steps. -- <save_checkpoint_steps> controls how often checkpoints are saved. -- <resume_training> if set to true, training should resume from latest model in /results/checkpoints. Default is false. -- <create_logfile> a flag indicating if output should be written to a log file or not (acceptable values are true or false. true indicates output should be saved to a log file.) -- <accumulate_gradients> a flag indicating whether a larger batch should be simulated with gradient accumulation. -- <gradient_accumulation_steps> an integer indicating the number of steps to accumulate gradients over. Effective batch size = batch size / gradient_accumulation_steps -- <seed> random seed for the run +- `<training_batch_size>` is per-GPU batch size used for training. Larger batch sizes run more efficiently, but require more memory. +- `<learning_rate>` is the base learning rate for training. +- `<precision>` is the type of math in your model, can be either `fp32` or `fp16`. The options mean: + - FP32: 32-bit IEEE single precision floats. + - FP16: Mixed precision 16 and 32 bit floats. +- `<num_gpus>` is the number of GPUs to use for training. Must be equal to or smaller than the number of GPUs attached to your node. +- `<warmup_proportion>` is the percentage of training steps used for warm-up at the start of training. +- `<training_steps>` is the total number of training steps. +- `<save_checkpoint_steps>` controls how often checkpoints are saved. +- `<resume_training>` if set to `true`, training resumes from the latest model in `/results/checkpoints`. Default is `false`. +- `<create_logfile>` a flag indicating if output should be written to a log file or not (acceptable values are true or false. true indicates output should be saved to a log file.) +- `<accumulate_gradients>` a flag indicating whether a larger batch should be simulated with gradient accumulation. +- `<gradient_accumulation_steps>` an integer indicating the number of steps to accumulate gradients over. Effective batch size = `training_batch_size` / `gradient_accumulation_steps` (see the sketch after this list). +- `<seed>` random seed for the run. +- `<allreduce_post_accumulation>` - If set to `true`, performs allreduce only after the defined number of gradient accumulation steps. +- `<allreduce_post_accumulation_fp16>` - If set to `true`, performs allreduce after gradient accumulation steps in FP16. +- `<accumulate_into_fp16>` - If set to `true`, accumulates/sums the gradients in FP16. +- `<training_batch_size_phase2>` is per-GPU batch size used for training in phase 2. Larger batch sizes run more efficiently, but require more memory. +- `<learning_rate_phase2>` is the base learning rate for training phase 2. +- `<warmup_proportion_phase2>` is the percentage of training steps used for warm-up at the start of training. +- `<training_steps_phase2>` is the total number of training steps for phase 2, to be continued in addition to phase 1. +- `<gradient_accumulation_steps_phase2>` an integer indicating the number of steps to accumulate gradients over in phase 2. Effective batch size = `training_batch_size_phase2` / `gradient_accumulation_steps_phase2`.
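The gradient accumulation mechanics behind these parameters can be sketched as follows. This is a simplified loop, assuming `model` returns the loss for a micro-batch; the actual `run_pretraining.py` additionally handles AMP loss scaling, allreduce placement, and checkpointing:

```python
def train_with_accumulation(model, optimizer, train_loader, accumulation_steps):
    """Simulate a larger batch by accumulating gradients (sketch)."""
    optimizer.zero_grad()
    for step, micro_batch in enumerate(train_loader):
        loss = model(*micro_batch)
        # Scale so the accumulated gradient is an average, not a sum.
        (loss / accumulation_steps).backward()
        # One weight update per group of accumulation_steps micro-batches.
        if (step + 1) % accumulation_steps == 0:
            optimizer.step()
            optimizer.zero_grad()
```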
For example: -`bash scripts/run_pretraining.sh 14 0.875e-4 fp16 16 0.01 1142857 2000 false true` +`bash scripts/run_pretraining.sh` -Trains BERT-large from scratch on a single DGX-2 using FP16 arithmetic. Checkpoints are written out every 2000 steps and all printouts are saved to a logfile. +Trains BERT-large from scratch on a single DGX-1 32G using FP16 arithmetic. 90% of the training steps are done with sequence length 128 (phase 1 of training) and 10% of the training steps are done with sequence length 512 (phase 2 of training). ##### Fine-tuning @@ -530,39 +541,39 @@ By default, each Python script implements fine-tuning a pre-trained BERT model for a specified number of training epochs as well as: - Uses 8 GPUs - Has FP16 precision enabled -- Saves a checkpoint at the end of training to the /results/ folder +- Saves a checkpoint at the end of training to the `/results/` folder -Fine-tuning Python scripts implement support for mixed precision and multi-GPU training through NVIDIA’s [Apex](https://github.com/NVIDIA/apex) library. For a full list of parameters and associated explanations, consult the [Parameters](https://docs.google.com/document/d/1O5f8pzO89rFhFgysrXAN6Y3di5oyhDlf_K6Q63VrMWM/edit#heading=h.xke3foclcggx) section. +Fine-tuning Python scripts implement support for mixed precision and multi-GPU training through NVIDIA’s [Apex](https://github.com/NVIDIA/apex) library. For a full list of parameters and associated explanations, consult the [Parameters](#parameters) section. All fine-tuning shell scripts have the same positional arguments, outlined below: -`bash scripts/run_squad.sh <checkpoint_to_load> <epochs> <batch_size> <learning_rate> <precision> <num_gpus> <seed> <squad_directory> <vocab_file> <output_directory> <mode> <bert_config_file> <max_steps>` +`bash scripts/run_squad.sh <checkpoint_to_load> <epochs> <batch_size> <learning_rate> <precision> <num_gpus> <seed> <squad_directory> <vocab_file> <output_directory> <mode> <bert_config_file> <max_steps>` -By default, the mode positional argument is set to train eval. See the [Quick start guide](https://docs.google.com/document/d/1O5f8pzO89rFhFgysrXAN6Y3di5oyhDlf_K6Q63VrMWM/edit#heading=h.d5y8qlmb8lpr) for explanations of each positional argument. +By default, the mode positional argument is set to `train eval`. See the [Quick Start Guide](#quick-start-guide) for explanations of each positional argument. Note: The first positional argument (the path to the checkpoint to load) is required. -Each fine-tuning script assumes that the corresponding dataset files exist in the data/ directory or separate path can be a command line input to run_squad.sh +Each fine-tuning script assumes that the corresponding dataset files exist in the `data/` directory, or a separate path can be provided as a command-line input to `run_squad.sh`. ### Inference process -##### Pre-training +##### Pre-training inference -Inference on a pretrained model is performed using the `run_pretraining_inference.py` script along with parameters defined in scripts/run_pretraining_inference.sh. Inference is supported both for single and multi-gpu.
##### Fine-tuning

@@ -530,39 +541,39 @@ By default, each Python script implements fine-tuning a pre-trained BERT model f

- Uses 8 GPUs
- Has FP16 precision enabled
-- Saves a checkpoint at the end of training to the /results/ folder
+- Saves a checkpoint at the end of training to the `/results/` folder

-Fine-tuning Python scripts implement support for mixed precision and multi-GPU training through NVIDIA’s [Apex](https://github.com/NVIDIA/apex) library. For a full list of parameters and associated explanations, consult the [Parameters](https://docs.google.com/document/d/1O5f8pzO89rFhFgysrXAN6Y3di5oyhDlf_K6Q63VrMWM/edit#heading=h.xke3foclcggx) section.
+Fine-tuning Python scripts implement support for mixed precision and multi-GPU training through NVIDIA’s [Apex](https://github.com/NVIDIA/apex) library. For a full list of parameters and associated explanations, consult the [Parameters](#parameters) section.

All fine-tuning shell scripts have the same positional arguments, outlined below:

-`bash scripts/run_squad.sh `
+`bash scripts/run_squad.sh `

-By default, the mode positional argument is set to train eval. See the [Quick start guide](https://docs.google.com/document/d/1O5f8pzO89rFhFgysrXAN6Y3di5oyhDlf_K6Q63VrMWM/edit#heading=h.d5y8qlmb8lpr) for explanations of each positional argument.
+By default, the mode positional argument is set to `train eval`. See the [Quick Start Guide](#quick-start-guide) for explanations of each positional argument.

Note: The first positional argument (the path to the checkpoint to load) is required.

-Each fine-tuning script assumes that the corresponding dataset files exist in the data/ directory or separate path can be a command line input to run_squad.sh
+Each fine-tuning script assumes that the corresponding dataset files exist in the `data/` directory; alternatively, a separate path can be passed as a command-line argument to `run_squad.sh`.

### Inference process

-##### Pre-training
+##### Pre-training inference

-Inference on a pretrained model is performed using the `run_pretraining_inference.py` script along with parameters defined in scripts/run_pretraining_inference.sh. Inference is supported both for single and multi-gpu. By setting either the --eval or --prediction flag, you can choose between running evaluation on a given dataset or doing prediction (on both masked language model and next sentence prediction).
+Inference on a pretrained model is performed using the `run_pretraining_inference.py` script along with parameters defined in `scripts/run_pretraining_inference.sh`. Inference is supported for both single and multi-GPU setups. By setting either the `--eval` or `--prediction` flag, you can choose between running evaluation on a given dataset or doing prediction (on both masked language model and next sentence prediction). Prediction mode can be used to measure the inference turnaround time.

-The run_pretraining_inference.sh script takes a model and a dataset and performs inference/evaluation on it. By default, the inferencing script:
+The `run_pretraining_inference.sh` script takes a model and a dataset and performs inference/evaluation on it. By default, the inference script:

- Has FP16 precision enabled
- Runs on 8 GPUs
-- Evaluates the latest checkpoint present in /results/checkpoints with a batch size of 14
+- Evaluates the latest checkpoint present in `/results/checkpoints` with a batch size of 14
- Runs inference on the entire Wikipedia dataset

-This script outputs prediction file to /results/pyt_bert_pretraining_inference__..log. The output log contains information about:
+This script outputs the prediction file to `/results/pyt_bert_pretraining_inference__..log`. The output log contains information about:

- Inference performance
-- Loss (masked language model loss and next sentence prediction loss) of the specified dataset if ground truths exist with the --eval flag.
+- Loss (masked language model loss and next sentence prediction loss) of the specified dataset, if ground truths exist, when run with the `--eval` flag.

For example:

@@ -570,21 +581,21 @@ For example:

Where:

-- is per-GPU batch size used for inference. Larger batch sizes run more efficiently, but require more memory.
-- is the type of math in your model, can be either FP32 or FP16. The options mean:
- - FP32: 32-bit IEEE single precision floats
- - FP16: 16-bit floats for 3.2x faster inference
-- is the number of GPUs to use for inference. Must be equal to or smaller than the number of GPUs attached to your node.
-- is either --eval for evaluation or --prediction for inference
-- is the model checkpoint to run inference on. Default is -1, which takes the most recent model checkpoint from the checkpoints folder.
-- is the total number of inference steps per process. Default is -1, which iterates over the entire dataset.
-- a flag indicating if output should be written to a logfile or not (acceptable values are true or false. true indicates output should be saved to a logfile.)
+- `` is per-GPU batch size used for inference. Larger batch sizes run more efficiently, but require more memory.
+- `` is the type of math in your model; it can be either `fp32` or `fp16`. The options mean:
+ - `fp32`: 32-bit IEEE single precision floats
+ - `fp16`: 16-bit floats for 3.2x faster inference
+- `` is the number of GPUs to use for inference. Must be equal to or smaller than the number of GPUs attached to your node.
+- `` is either `--eval` for evaluation or `--prediction` for inference
+- `` is the model checkpoint to run inference on. Default is `-1`, which takes the most recent model checkpoint from the checkpoints folder.
+- `` is the total number of inference steps per process. Default is `-1`, which iterates over the entire dataset.
+- `` a flag indicating if output should be written to a logfile or not (acceptable values are `true` or `false`; `true` indicates output should be saved to a logfile).

For example:

`bash scripts/run_pretraining_inference.sh 14 fp16 8 eval -1 -1 true`

-#### Fine-tuning
+#### Fine-tuning inference

Evaluation fine-tuning is enabled by the same scripts as training:

@@ -592,7 +603,7 @@ Evaluation fine-tuning is enabled by the same scripts as training:

The mode positional argument of the shell script is used to run in evaluation mode. The fine-tuned BERT model will be run on the evaluation dataset, and the evaluation loss and accuracy will be displayed.

-Each inference shell script expects dataset files to exist in the same locations as the corresponding training scripts. The inference scripts can be run with default settings. By setting `mode` variable in the script to either "eval" or "prediction" flag, you can choose between running evaluation on a given dataset or doing prediction.
+Each inference shell script expects dataset files to exist in the same locations as the corresponding training scripts. The inference scripts can be run with default settings. By setting the `mode` variable in the script to either `eval` or `prediction`, you can choose between running evaluation on a given dataset or doing prediction.

`bash scripts/run_squad.sh `

@@ -606,6 +617,8 @@ The following section shows how to run benchmarks measuring the model performanc

#### Training performance benchmark

+Training performance benchmarks for both pretraining and fine-tuning can be obtained by running `scripts/run_pretraining.sh` and `scripts/run_squad.sh`, respectively. The required parameters can be passed through the command line as described in [Training process](#training-process).
+
To benchmark the training performance on a specific batch size, run:

`bash scripts/run_squad.sh train `

An example call used to generate throughput numbers:

`bash scripts/run_squad.sh /workspace/bert/bert_large_uncased_wiki+books.pt.model 2.0 4 3e-5 fp16 8 42 /workspace/bert/squad_data /workspace/bert/scripts/vocab/vocab /results/SQuAD train /workspace/bert/bert_config.json -1`

-Training benchmark was run on 1x V100 16G GPU.
+

#### Inference performance benchmark

+Inference performance benchmarks for both pretraining and fine-tuning can be obtained by running `scripts/run_pretraining_inference.sh` and `scripts/run_squad.sh`, respectively. The required parameters can be passed through the command line as described in [Inference process](#inference-process).
+
To benchmark the inference performance on a specific batch size, run:

`bash scripts/run_squad.sh eval `

An example call used to generate throughput numbers:

`bash scripts/run_squad.sh /workspace/bert/bert_large_uncased_wiki+books.pt.model 2.0 4 3e-5 fp16 8 42 /workspace/bert/squad_data /workspace/bert/scripts/vocab/vocab /results/SQuAD eval /workspace/bert/bert_config.json -1`

-Inference benchmark was run on 1x V100 16G GPU.
+

### Results

-
+

#### Training accuracy results

-##### NVIDIA DGX-1 (8x V100 16G)
-Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-1 with (8x V100 16G) GPUs.
+Our results were obtained by running the `scripts/run_squad.sh` and `scripts/run_pretraining.sh` training scripts in the pytorch:19.07-py3 NGC container on NVIDIA DGX-2 with (16x V100 32G) GPUs for pretraining and NVIDIA DGX-1 with (8x V100 16G) GPUs for fine-tuning.
+
+Note: Pretraining results were obtained with a dataset that was created using an earlier version of the data preprocessing scripts than the one currently in this repository, and with an earlier snapshot of the Wikipedia dumps. The results in the table will be updated soon with results using the latest data preparation scripts. Early data show that the results are quite similar.
+
+
+##### Pre-training loss results
+
+DGX System| GPUs | Accumulated Batch size / GPU (Phase 1 and Phase 2) | Accumulation steps (Phase 1 and Phase 2) | Final Loss - FP32 | Final Loss - mixed precision | Time to train(days) - FP32 | Time to train(days) - mixed precision | Time to train speedup (FP32 to mixed precision)
+|-|-|-|-|-|-|-|-|-
+NVIDIA DGX-1 With 16G|8|8192 and 4096 |512 and 1024|-|1.53|-|6.84|-
+NVIDIA DGX-2 With 32G|16|4096 and 2048 |64 and 256|-|1.52|-|2.71|-
+
+##### Fine-tuning accuracy results

| GPUs | Batch size / GPU | Accuracy - FP32(% F1) | Accuracy - mixed precision(% F1) | Time to train(hours) - FP32 | Time to train(hours) - mixed precision | Time to train speedup (FP32 to mixed precision)
|-|-|-|-|-|-|-

@@ -644,7 +670,15 @@ Our results were obtained by running the `scripts/run_squad.sh` training script

##### Training stability test

-Training stability with 8 GPUs, fp16 computations, batch size of 4:
+###### Pre-training stability test
+
+| Accuracy Metric | Seed 1
+|-|-
+|Final Loss | 1.52
+
+###### Fine-tuning stability test
+
+Training stability with 8 GPUs, FP16 computations, batch size of 4:

| Accuracy Metric | Seed 1 | Seed 2 | Seed 3 | Seed 4 | Seed 5 | Mean | Standard Deviation
|-|-|-|-|-|-|-|-

@@ -653,9 +687,24 @@ Training stability with 8 GPUs, fp16 computations, batch size of 4:

#### Training performance results

-##### NVIDIA DGX 1 (8x V100 16G)
+##### Training performance: NVIDIA DGX-1 (8x V100 16G)
+
+Our results were obtained by running the `scripts/run_pretraining.sh` and `scripts/run_squad.sh` training scripts in the pytorch:19.07-py3 NGC container on NVIDIA DGX-1 with (8x V100 16G) GPUs. Performance numbers (in sequences per second) were averaged over an entire training epoch.
+
+###### Pre-training NVIDIA DGX-1 With 16G
+
+| GPUs | Batch size / GPU | Sequence length | Throughput - FP32(sequences/sec) | Throughput - mixed precision(sequences/sec) | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision
+|------------------|----------------------|-------------------|-----------------------------------------------|------------------------------------|---------------------------------|----------------------|----------------------------------------------
+|1 | 4| 512| 7.56 |26.64 |3.52 |1.00 | 1.00
+|4 | 4| 512| 28 |98.24 | 3.50| 3.70| 3.69
+| 8| 4| 512| 56.16 |194.56 | 3.46| 7.43| 7.30
+|1 | 16| 128| N/A |151.36 |N/A |N/A | 1.00
+|4 | 16| 128| N/A |602.88 | N/A| N/A| 3.98
+| 8| 16| 128| N/A |1192.96 | N/A| N/A| 7.88
+
+
+###### Fine-tuning NVIDIA DGX-1 With 16G

-Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-1 with (8x V100 16G) GPUs. Performance numbers (in sequences per second) were averaged over an entire training epoch.
| GPUs | Batch size / GPU | Throughput - FP32(sequences/sec) | Throughput - mixed precision(sequences/sec) | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision |------------------|----------------------|-----------------------------------------------|------------------------------------|---------------------------------|----------------------|---------------------------------------------- @@ -666,22 +715,59 @@ Our results were obtained by running the `scripts/run_squad.sh` training script |4 | 10|N/A |163.6 | N/A| N/A| 3.62 | 8| 10|N/A |327.2| N/A| N/A| 7.24 -##### NVIDIA DGX 1 (8x V100 32G) -Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-1 with (8x V100 32G) GPUs. Performance numbers (in sequences per second) were averaged over an entire training epoch. +##### Training performance: NVIDIA DGX-1 (8x V100 32G) + +Our results were obtained by running `scripts/run_pretraining.sh` and `scripts/run_squad.sh` training scripts in the pytorch:19.07-py3 NGC container on NVIDIA DGX-1 with (8x V100 32G) GPUs. Performance numbers (in sequences per second) were averaged over an entire training epoch. + +###### Pre-training NVIDIA DGX-1 With 32G + +| GPUs | Batch size / GPU | Sequence length | Throughput - FP32(sequences/sec) | Throughput - mixed precision(sequences/sec) | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision +|------------------|----------------------|-------------------|-----------------------------------------------|------------------------------------|---------------------------------|----------------------|---------------------------------------------- +|1 | 8| 512|8.36 |30.08 | 3.68| 1.00| 1.00 +|4 | 8| 512|31.52 |116.80 | 3.70| 3.84| 3.82 +| 8| 8| 512|62.72 |231.68 | 3.69| 7.68| 7.61 +|1 | 10| 512|N/A |46.00| N/A| N/A| 1.0 +|4 | 10| 512|N/A |164.00 | N/A| N/A| 3.57 +| 8| 10| 512|N/A |325.60| N/A| N/A| 7.08 +|1 | 64| 128|N/A |186.32| N/A| N/A| 1.0 +|4 | 64| 128|N/A |739.84 | N/A| N/A| 3.97 +| 8| 64| 128|N/A |1479.68| N/A| N/A| 7.94 + +###### Fine-tuning NVIDIA DGX-1 With 32G | GPUs | Batch size / GPU | Throughput - FP32(sequences/sec) | Throughput - mixed precision(sequences/sec) | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision |------------------|----------------------|-----------------------------------------------|------------------------------------|---------------------------------|----------------------|---------------------------------------------- -|1 | 4|8.64 |36.04 | 4.171| 1.00| 1.00 -|4 | 4|31.52 |116.80 | 3.71| 3.64| 3.24 -| 8| 4|64.32 |231.04 | 3.59| 7.44| 6.41 +|1 | 8|8.64 |36.04 | 4.171| 1.00| 1.00 +|4 | 8|31.52 |116.80 | 3.71| 3.64| 3.24 +| 8| 8|64.32 |231.04 | 3.59| 7.44| 6.41 |1 | 10|N/A |46.00| N/A| N/A| 1.0 |4 | 10|N/A |164.00 | N/A| N/A| 3.57 | 8| 10|N/A |325.60| N/A| N/A| 7.08 -##### NVIDIA DGX 2 (16x V100 32G) -Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-2 with (16x V100 32G) GPUs. Performance numbers (in sequences per second) were averaged over an entire training epoch. +##### Training performance: NVIDIA DGX-2 (16x V100 32G) + +Our results were obtained by running `scripts/run_pretraining.sh` and `scripts/run_squad.sh` training scripts in the pytorch:19.07-py3 NGC container on NVIDIA DGX-2 with (16x V100 32G) GPUs. 
Performance numbers (in sequences per second) were averaged over an entire training epoch.
+
+###### Pre-training NVIDIA DGX-2 With 32G
+
+| GPUs | Batch size / GPU | Sequence length | Throughput - FP32(sequences/sec) | Throughput - mixed precision(sequences/sec) | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision
+|------------------|----------------------|-------------------|-----------------------------------------------|------------------------------------|---------------------------------|----------------------|----------------------------------------------
+|1 |8 | 512|9.0| 32.32| 3.59| 1.00| 1.00
+|4 |8 | 512| 34.4| 124.16| 3.60| 3.82| 3.84
+|8 | 8| 512| 68.16| 247.04| 3.62| 7.57| 7.64
+|16 | 8| 512| 135.68| 488.96| 3.60| 15.08| 15.13
+|1 |10 | 512|N/A | 47.40| N/A| N/A| 1.00
+|4 |10 | 512| N/A| 165.60| N/A| N/A| 3.49
+|8 | 10| 512| N/A| 325.60| N/A| N/A| 6.87
+|16 | 10| 512| N/A| 648.00| N/A| N/A| 13.67
+|1 |64 | 128|N/A | 199.05| N/A| N/A| 1.00
+|4 |64 | 128| N/A| 796.16| N/A| N/A| 3.99
+|8 | 64| 128| N/A| 1592.32| N/A| N/A| 7.99
+|16 | 64| 128| N/A| 3174.40| N/A| N/A| 15.94
+
+###### Fine-tuning NVIDIA DGX-2 With 32G

| GPUs | Batch size / GPU | Throughput - FP32(sequences/sec) | Throughput - mixed precision(sequences/sec) | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision
|------------------|----------------------|-----------------------------------------------|------------------------------------|---------------------------------|----------------------|----------------------------------------------

@@ -698,30 +784,54 @@ To achieve these same results, follow the steps in the [Quick Start Guide](#quic

#### Inference performance results

-##### Inference performance: NVIDIA DGX 1 (8x V100 16G)
+##### Inference performance: NVIDIA DGX-1 (1x V100 16G)

-Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-1 with (8x V100 16G) GPUs.
+Our results were obtained by running `scripts/run_pretraining_inference.sh` (on data of sequence length 512) and `scripts/run_squad.sh` in the pytorch:19.07-py3 NGC container on NVIDIA DGX-1 with (1x V100 16G) GPUs.
+
+###### Pre-training inference on NVIDIA DGX-1 with 16G
+
+|GPUs | Throughput - FP32(sequences/sec)|Throughput - Mixed Precision(sequences/sec)
+|---------- |---------|---------------
+| 1| 28.32| 94.36
+
+###### Fine-tuning inference on NVIDIA DGX-1 with 16G

|GPUs | Throughput - FP32(sequences/sec)|Throughput - Mixed Precision(sequences/sec)
|---------- |---------|---------------
| 1| 37.64| 119.76

-##### Inference performance: NVIDIA DGX 1 (8x V100 32G)
+##### Inference performance: NVIDIA DGX-1 (1x V100 32G)

-Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-1 with (8x V100 32G) GPUs.
+Our results were obtained by running `scripts/run_pretraining_inference.sh` and `scripts/run_squad.sh` in the pytorch:19.07-py3 NGC container on NVIDIA DGX-1 with (1x V100 32G) GPUs.
+
+###### Pre-training inference on NVIDIA DGX-1 with 32G
+
+|GPUs | Throughput(sequences/sec) - FP32|Throughput - Mixed Precision(sequences/sec)
+|---------- |---------|---------------
+| 1| 27.58| 90.16
+
+###### Fine-tuning inference on NVIDIA DGX-1 with 32G

|GPUs | Throughput(sequences/sec) - FP32|Throughput - Mixed Precision(sequences/sec)
|---------- |---------|---------------
| 1| 37.64| 119.76

-##### Inference performance: NVIDIA DGX 2 (16x V100 32G)
+##### Inference performance: NVIDIA DGX-2 (1x V100 32G)

-Our results were obtained by running the `scripts/run_squad.sh` training script in the pytorch:19.06-py3 NGC container on NVIDIA DGX-2 with (16x V100 32G) GPUs.
+Our results were obtained by running `scripts/run_pretraining_inference.sh` and `scripts/run_squad.sh` in the pytorch:19.07-py3 NGC container on NVIDIA DGX-2 with (1x V100 32G) GPUs.
+
+###### Pre-training inference on NVIDIA DGX-2 with 32G
+
+|GPUs | Throughput - FP32(sequences/sec)|Throughput - Mixed Precision(sequences/sec)
+|---------- |---------|---------------
+| 1| 30.24| 97.72
+
+###### Fine-tuning inference on NVIDIA DGX-2 with 32G

|GPUs | Throughput - FP32(sequences/sec)|Throughput - Mixed Precision(sequences/sec)
|---------- |---------|---------------
| 1| 35.76| 112.60

-
+
To achieve these same results, follow the steps in the [Quick Start Guide](#quick-start-guide).

The inference performance metrics used were items/second.

@@ -730,6 +840,12 @@ The inference performance metrics used were items/second.

### Changelog

+October 2019
+
+- Pretraining support with LAMB optimizer
+
+- Updated data download and preprocessing
+
July 2019

- Initial release

diff --git a/PyTorch/LanguageModeling/BERT/bind_pyt.py b/PyTorch/LanguageModeling/BERT/bind_pyt.py
new file mode 100644
index 00000000..9e907f47
--- /dev/null
+++ b/PyTorch/LanguageModeling/BERT/bind_pyt.py
@@ -0,0 +1,124 @@
+import sys
+import subprocess
+import os
+import socket
+from argparse import ArgumentParser, REMAINDER
+
+import torch
+
+
+def parse_args():
+    """
+    Helper function parsing the command line options
+    @retval ArgumentParser
+    """
+    parser = ArgumentParser(description="PyTorch distributed training launch "
+                                        "helper utility that will spawn up "
+                                        "multiple distributed processes")
+
+    # Optional arguments for the launch helper
+    parser.add_argument("--nnodes", type=int, default=1,
+                        help="The number of nodes to use for distributed "
+                             "training")
+    parser.add_argument("--node_rank", type=int, default=0,
+                        help="The rank of the node for multi-node distributed "
+                             "training")
+    parser.add_argument("--nproc_per_node", type=int, default=1,
+                        help="The number of processes to launch on each node, "
+                             "for GPU training, this is recommended to be set "
+                             "to the number of GPUs in your system so that "
+                             "each process can be bound to a single GPU.")
+    parser.add_argument("--master_addr", default="127.0.0.1", type=str,
+                        help="Master node (rank 0)'s address, should be either "
+                             "the IP address or the hostname of node 0, for "
+                             "single node multi-proc training, the "
+                             "--master_addr can simply be 127.0.0.1")
+    parser.add_argument("--master_port", default=29500, type=int,
+                        help="Master node (rank 0)'s free port that needs to "
+                             "be used for communication during distributed "
+                             "training")
+    parser.add_argument('--no_hyperthreads', action='store_true',
+                        help='Flag to disable binding to hyperthreads')
+    parser.add_argument('--no_membind', action='store_true',
+                        help='Flag to disable memory binding')
+
+    # non-optional arguments for binding
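+    # Added note: the two required topology arguments below drive the numactl
+    # binding computed in main(). For example (hypothetical DGX-2-like node),
+    # with --nsockets_per_node=2, --ncores_per_socket=24 and
+    # --nproc_per_node=16, NGPUS_PER_SOCKET = 8 and NCORES_PER_GPU = 3, so
+    # local_rank 0 is bound to physical cores 0-2, hyperthread siblings 48-50,
+    # and memory node 0 (--physcpubind=0-2,48-50 --membind=0).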
parser.add_argument("--nsockets_per_node", type=int, required=True, + help="Number of CPU sockets on a node") + parser.add_argument("--ncores_per_socket", type=int, required=True, + help="Number of CPU cores per socket") + + # positional + parser.add_argument("training_script", type=str, + help="The full path to the single GPU training " + "program/script to be launched in parallel, " + "followed by all the arguments for the " + "training script") + + # rest from the training program + parser.add_argument('training_script_args', nargs=REMAINDER) + return parser.parse_args() + +def main(): + args = parse_args() + + # variables for numactrl binding + + + NSOCKETS = args.nsockets_per_node + NGPUS_PER_SOCKET = (args.nproc_per_node // args.nsockets_per_node) + (1 if (args.nproc_per_node % args.nsockets_per_node) else 0) + NCORES_PER_GPU = args.ncores_per_socket // NGPUS_PER_SOCKET + + # world size in terms of number of processes + dist_world_size = args.nproc_per_node * args.nnodes + + # set PyTorch distributed related environmental variables + current_env = os.environ.copy() + current_env["MASTER_ADDR"] = args.master_addr + current_env["MASTER_PORT"] = str(args.master_port) + current_env["WORLD_SIZE"] = str(dist_world_size) + + processes = [] + + for local_rank in range(0, args.nproc_per_node): + # each process's rank + dist_rank = args.nproc_per_node * args.node_rank + local_rank + current_env["RANK"] = str(dist_rank) + + # form numactrl binding command + cpu_ranges = [local_rank * NCORES_PER_GPU, + (local_rank + 1) * NCORES_PER_GPU - 1, + local_rank * NCORES_PER_GPU + (NCORES_PER_GPU * NGPUS_PER_SOCKET * NSOCKETS), + (local_rank + 1) * NCORES_PER_GPU + (NCORES_PER_GPU * NGPUS_PER_SOCKET * NSOCKETS) - 1] + + numactlargs = [] + if args.no_hyperthreads: + numactlargs += [ "--physcpubind={}-{}".format(*cpu_ranges[0:2]) ] + else: + numactlargs += [ "--physcpubind={}-{},{}-{}".format(*cpu_ranges) ] + + if not args.no_membind: + memnode = local_rank // NGPUS_PER_SOCKET + numactlargs += [ "--membind={}".format(memnode) ] + + # spawn the processes + cmd = [ "/usr/bin/numactl" ] \ + + numactlargs \ + + [ sys.executable, + "-u", + args.training_script, + "--local_rank={}".format(local_rank) + ] \ + + args.training_script_args + + process = subprocess.Popen(cmd, env=current_env) + processes.append(process) + + for process in processes: + process.wait() + + +if __name__ == "__main__": + main() + + diff --git a/PyTorch/LanguageModeling/BERT/config_DGX1.sh b/PyTorch/LanguageModeling/BERT/config_DGX1.sh new file mode 100644 index 00000000..250a1ac6 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX1.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=16 +LEARNING_RATE=6e-3 +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results --fp16 --max_steps=7508 --num_steps_per_checkpoint=200" + +## System run parms +DGXNNODES=1 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=00:15:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=8 +DGXSOCKETCORES=20 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2.sh new file mode 100644 index 00000000..6860fed5 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2.sh @@ -0,0 
+1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=4096 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=7038 --num_steps_per_checkpoint=2500 --log_freq=1 --gradient_accumulation_steps=64 --allreduce_post_accumulation --allreduce_post_accumulation_fp16" + +## System run parms +DGXNNODES=1 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" +WALLTIME="3-00:00:00" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_16x16x64x4_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_16x16x64x4_wiki_books.sh new file mode 100644 index 00000000..26be869f --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_16x16x64x4_wiki_books.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=256 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /checkpoints --fp16 --max_steps=7038 --num_steps_per_checkpoint=2500 --log_freq=1 --gradient_accumulation_steps=4 --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_from_checkpoint" + +## System run parms +DGXNNODES=16 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 
--device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_16x16x8x16_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_16x16x8x16_wiki_books_phase2.sh new file mode 100644 index 00000000..5ccf00e3 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_16x16x8x16_wiki_books_phase2.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=128 +LEARNING_RATE="4e-3" +WARMUP_UPDATES=0.128 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1563 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=16 --resume_from_checkpoint --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=16 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_1x16x8x256_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_1x16x8x256_wiki_books_phase2.sh new file mode 100644 index 00000000..ce4d0d6d --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_1x16x8x256_wiki_books_phase2.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=2048 +LEARNING_RATE="4e-3" +WARMUP_UPDATES=0.128 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1563 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=256 --resume_from_checkpoint --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=1 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) 
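+# Added note: config file names encode <nodes>x<GPUs per node>x<per-GPU micro-batch>x<gradient accumulation steps>;
+# here 1x16x8x256 corresponds to BATCHSIZE=2048 accumulated per GPU as 8 * 256.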
+WALLTIME="00:15:00" +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_2x16x40.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_2x16x40.sh new file mode 100644 index 00000000..e0902ad2 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_2x16x40.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data \ +--do_train \ +--config_file=bert_config.json \ + --max_seq_length=128 \ + --max_predictions_per_seq=20 \ + --output_dir=/results/output \ + --fp16 \ + --max_steps=7508 \ + --num_steps_per_checkpoint=200 \ + --log_freq=1" + +## System run parms +DGXNNODES=2 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=00:30:00 +DEADLINE=$(date -d '+72 hours' '+%FT%T') +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x32x3_wiki_books_fp32.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x32x3_wiki_books_fp32.sh new file mode 100644 index 00000000..57de84db --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x32x3_wiki_books_fp32.sh @@ -0,0 
+1,22 @@ +#!/bin/bash + +## DL params +BATCHSIZE=96 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +SEED=23448 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --max_steps=7038 --num_steps_per_checkpoint=10000 --log_freq=1 --gradient_accumulation_steps=3" + +## System run parms +DGXNNODES=46 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=05:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x48x2_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x48x2_wiki_books.sh new file mode 100644 index 00000000..0d120b41 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x48x2_wiki_books.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=96 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=7038 --num_steps_per_checkpoint=10000 --log_freq=1 --gradient_accumulation_steps=2 --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --accumulate_into_fp16" + +## System run parms +DGXNNODES=46 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 
--device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x4x12_wiki_books_phase2_fp32.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x4x12_wiki_books_phase2_fp32.sh new file mode 100644 index 00000000..9d3cd743 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x4x12_wiki_books_phase2_fp32.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=48 +LEARNING_RATE="4.12e-3" +WARMUP_UPDATES=0.138 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --max_steps=1450 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=12 --resume_from_checkpoint" + +## System run parms +DGXNNODES=46 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=05:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x8x6_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x8x6_wiki_books_phase2.sh new file mode 100644 index 00000000..87e5a520 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_46x16x8x6_wiki_books_phase2.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=48 +LEARNING_RATE="4.12e-3" +WARMUP_UPDATES=0.138 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1450 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=6 --resume_from_checkpoint --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --accumulate_into_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=46 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT 
off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_48x16x64x2_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_48x16x64x2_wiki_books.sh new file mode 100644 index 00000000..f2473f17 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_48x16x64x2_wiki_books.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=128 +LEARNING_RATE="6.5e-3" +WARMUP_UPDATES=0.5328 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=4692 --num_steps_per_checkpoint=10000 --log_freq=1 --gradient_accumulation_steps=2" + +## System run parms +DGXNNODES=48 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_48x16x8x8_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_48x16x8x8_wiki_books_phase2.sh new file mode 100644 index 00000000..80ecbab0 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_48x16x8x8_wiki_books_phase2.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="5e-3" +WARMUP_UPDATES=0.192 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train 
--config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1042 --num_steps_per_checkpoint=1000 --log_freq=1 --gradient_accumulation_steps=8 --resume_from_checkpoint --phase2 --allreduce_post_accumulation --allreduce_post_accumulation_fp16" + +## System run parms +DGXNNODES=48 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x32x2_fp32_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x32x2_fp32_wiki_books.sh new file mode 100644 index 00000000..d8ecd68d --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x32x2_fp32_wiki_books.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --max_steps=7038 --num_steps_per_checkpoint=10000 --log_freq=1 --gradient_accumulation_steps=2" + +## System run parms +DGXNNODES=64 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=04:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 
--device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x4x8_wiki_books_phase2_fp32.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x4x8_wiki_books_phase2_fp32.sh new file mode 100644 index 00000000..2acac7de --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x4x8_wiki_books_phase2_fp32.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=32 +LEARNING_RATE="4e-3" +WARMUP_UPDATES=0.128 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --max_steps=1563 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=8 --resume_from_checkpoint" + +## System run parms +DGXNNODES=64 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=04:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x64_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x64_wiki_books.sh new file mode 100644 index 00000000..91b29144 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x64_wiki_books.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=7038 --num_steps_per_checkpoint=10000 --log_freq=1" + +## System run parms +DGXNNODES=64 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 
--device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x4_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x4_wiki_books_phase2.sh new file mode 100644 index 00000000..db424bb5 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x4_wiki_books_phase2.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=32 +LEARNING_RATE="4e-3" +WARMUP_UPDATES=0.128 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1563 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=4 --resume_from_checkpoint --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=64 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x8_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x8_wiki_books_phase2.sh new file mode 100644 index 00000000..347732b5 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_64x16x8x8_wiki_books_phase2.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.256 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=782 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=8 --resume_from_checkpoint 
--allreduce_post_accumulation --allreduce_post_accumulation_fp16 --accumulate_into_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=64 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x48_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x48_wiki_books.sh new file mode 100644 index 00000000..d57abe96 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x48_wiki_books.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=48 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=7038 --num_steps_per_checkpoint=10000 --log_freq=1" + +## System run parms +DGXNNODES=92 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x64_wiki_books.sh 
b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x64_wiki_books.sh new file mode 100644 index 00000000..7bffbee5 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x64_wiki_books.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="6.5e-3" +WARMUP_UPDATES=0.5107 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=4896 --num_steps_per_checkpoint=10000 --log_freq=1" + +## System run parms +DGXNNODES=92 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x3_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x3_wiki_books_phase2.sh new file mode 100644 index 00000000..805087db --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x3_wiki_books_phase2.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +## DL params +BATCHSIZE=24 +LEARNING_RATE="4.12e-3" +WARMUP_UPDATES=0.138 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1450 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=3 --resume_from_checkpoint --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=92 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 
--device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x4_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x4_wiki_books_phase2.sh new file mode 100644 index 00000000..c84fa56d --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_92x16x8x4_wiki_books_phase2.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +## DL params +BATCHSIZE=32 +LEARNING_RATE="5e-3" +WARMUP_UPDATES=0.192 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1088 --num_steps_per_checkpoint=1000 --log_freq=1 --gradient_accumulation_steps=4 --resume_from_checkpoint --phase2 --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=4896 --phase1_end_step=4896" + +## System run parms +DGXNNODES=92 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 +DEADLINE=$(date -d '+168 hours' '+%FT%T') +SLURM_EMAIL_TYPE="END" + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x40_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x40_wiki_books.sh new file mode 100644 index 00000000..6d1904d8 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x40_wiki_books.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=40 +LEARNING_RATE="6e-3" +WARMUP_UPDATES=0.2843 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=7508 --num_steps_per_checkpoint=10000 --log_freq=1" + +## System run parms +DGXNNODES=96 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config 
params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x64_wiki_books.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x64_wiki_books.sh new file mode 100644 index 00000000..f36cd3d9 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x64_wiki_books.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=64 +LEARNING_RATE="6.5e-3" +WARMUP_UPDATES=0.5328 +EXTRA_PARAMS="--input_dir=/workspace/data --do_train --config_file=bert_config.json --max_seq_length=128 --max_predictions_per_seq=20 --output_dir /results/output --fp16 --max_steps=4692 --num_steps_per_checkpoint=10000 --log_freq=1" + +## System run parms +DGXNNODES=96 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x3_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x3_wiki_books_phase2.sh new file mode 100644 index 00000000..48a2dce1 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x3_wiki_books_phase2.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=24 +LEARNING_RATE="4e-3" +WARMUP_UPDATES=0.144 
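+# Note: WARMUP_UPDATES appears to be a warmup proportion (the fraction of max_steps spent ramping up the learning rate) rather than an absolute update count; every config in this patch sets it to a value between 0 and 1.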
+EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1390 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=3 --resume_from_checkpoint --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=7038" + +## System run parms +DGXNNODES=96 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x4_wiki_books_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x4_wiki_books_phase2.sh new file mode 100644 index 00000000..5f63828e --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_96x16x8x4_wiki_books_phase2.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=32 +LEARNING_RATE="5e-3" +WARMUP_UPDATES=0.192 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1042 --num_steps_per_checkpoint=1000 --log_freq=1 --gradient_accumulation_steps=4 --resume_from_checkpoint --phase2 --allreduce_post_accumulation --allreduce_post_accumulation_fp16 --resume_step=4896" + +## System run parms +DGXNNODES=96 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 
--device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/config_DGX2_phase2.sh b/PyTorch/LanguageModeling/BERT/config_DGX2_phase2.sh new file mode 100644 index 00000000..eca8b8ee --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/config_DGX2_phase2.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +## DL params +BATCHSIZE=24 +LEARNING_RATE="4e-3" +WARMUP_UPDATES=0.144 +EXTRA_PARAMS="--input_dir=/workspace/data_phase2 --do_train --config_file=bert_config.json --max_seq_length=512 --max_predictions_per_seq=80 --output_dir /checkpoints --fp16 --max_steps=1390 --num_steps_per_checkpoint=1000 --log_freq=1 --phase2 --gradient_accumulation_steps=3 --resume_from_checkpoint" + +## System run parms +DGXNNODES=1 +DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' ) +WALLTIME=02:00:00 + +## System config params +DGXNGPU=16 +DGXSOCKETCORES=24 +DGXNSOCKET=2 +DGXHT=2 # HT is on is 2, HT off is 1 +DGXIBDEVICES='--device=/dev/infiniband/rdma_cm --device=/dev/infiniband/ucm10 --device=/dev/infiniband/ucm9 --device=/dev/infiniband/ucm8 --device=/dev/infiniband/ucm7 --device=/dev/infiniband/ucm4 --device=/dev/infiniband/ucm3 --device=/dev/infiniband/ucm2 --device=/dev/infiniband/ucm1 --device=/dev/infiniband/uverbs10 --device=/dev/infiniband/uverbs9 --device=/dev/infiniband/uverbs8 --device=/dev/infiniband/uverbs7 --device=/dev/infiniband/uverbs4 --device=/dev/infiniband/uverbs3 --device=/dev/infiniband/uverbs2 --device=/dev/infiniband/uverbs1 --device=/dev/infiniband/issm10 --device=/dev/infiniband/umad10 --device=/dev/infiniband/issm9 --device=/dev/infiniband/umad9 --device=/dev/infiniband/issm8 --device=/dev/infiniband/umad8 --device=/dev/infiniband/issm7 --device=/dev/infiniband/umad7 --device=/dev/infiniband/issm4 --device=/dev/infiniband/umad4 --device=/dev/infiniband/issm3 --device=/dev/infiniband/umad3 --device=/dev/infiniband/issm2 --device=/dev/infiniband/umad2 --device=/dev/infiniband/issm1 --device=/dev/infiniband/umad1' diff --git a/PyTorch/LanguageModeling/BERT/data/BooksDownloader.py b/PyTorch/LanguageModeling/BERT/data/BooksDownloader.py new file mode 100644 index 00000000..4b0f5340 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/BooksDownloader.py @@ -0,0 +1,16 @@ +# NVIDIA + +import subprocess + +class BooksDownloader: + def __init__(self, save_path): + self.save_path = save_path + + + def download(self): + bookscorpus_download_command = 'python3 /workspace/bookcorpus/download_files.py --list /workspace/bookcorpus/url_list.jsonl --out' + bookscorpus_download_command += ' ' + self.save_path + '/bookscorpus' + bookscorpus_download_command += ' --trash-bad-count' + # subprocess.run() with check=True blocks until the download script finishes and raises on failure; + # the CompletedProcess it returns has no communicate() method, so no further wait is needed. + subprocess.run(bookscorpus_download_command, shell=True, check=True) diff --git a/PyTorch/LanguageModeling/BERT/data/BookscorpusTextFormatting.py b/PyTorch/LanguageModeling/BERT/data/BookscorpusTextFormatting.py new file mode 100644 index 00000000..ae886917 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/BookscorpusTextFormatting.py @@ -0,0 +1,21 @@ +# NVIDIA + +import glob +import os + +class BookscorpusTextFormatting: + def __init__(self, books_path, output_filename, recursive = False): + self.books_path = books_path + self.recursive = recursive + 
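# When recursive=True, merge() below also descends into per-book subdirectories via '**' globbing. +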
self.output_filename = output_filename + + + # This puts one book per line + def merge(self): + with open(self.output_filename, mode='w', newline='\n') as ofile: + # Honor the recursive flag: glob only descends into subdirectories when the pattern contains '**' + pattern = self.books_path + ('/**/*.txt' if self.recursive else '/*.txt') + for filename in glob.glob(pattern, recursive=self.recursive): + with open(filename, mode='r', encoding='utf-8-sig', newline='\n') as file: + for line in file: + if line.strip() != '': + ofile.write(line.strip() + ' ') + ofile.write("\n\n") \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/data/Downloader.py b/PyTorch/LanguageModeling/BERT/data/Downloader.py new file mode 100644 index 00000000..cee5a786 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/Downloader.py @@ -0,0 +1,80 @@ +# NVIDIA + +from GooglePretrainedWeightDownloader import GooglePretrainedWeightDownloader +from NVIDIAPretrainedWeightDownloader import NVIDIAPretrainedWeightDownloader +from WikiDownloader import WikiDownloader +from BooksDownloader import BooksDownloader +from MRPCDownloader import MRPCDownloader +from SquadDownloader import SquadDownloader + + +class Downloader: + def __init__(self, dataset_name, save_path): + self.dataset_name = dataset_name + self.save_path = save_path + + + def download(self): + if self.dataset_name == 'bookscorpus': + self.download_bookscorpus() + + elif self.dataset_name == 'wikicorpus_en': + self.download_wikicorpus('en') + + elif self.dataset_name == 'wikicorpus_zh': + self.download_wikicorpus('zh') + + elif self.dataset_name == 'google_pretrained_weights': + self.download_google_pretrained_weights() + + elif self.dataset_name == 'nvidia_pretrained_weights': + self.download_nvidia_pretrained_weights() + + elif self.dataset_name == 'mrpc': + self.download_mrpc() + + elif self.dataset_name == 'squad': + self.download_squad() + + elif self.dataset_name == 'all': + # The helpers below already know self.save_path; passing it again raised TypeError + self.download_bookscorpus() + self.download_wikicorpus('en') + self.download_wikicorpus('zh') + self.download_google_pretrained_weights() + self.download_nvidia_pretrained_weights() + self.download_mrpc() + self.download_squad() + + else: + print(self.dataset_name) + assert False, 'Unknown dataset_name provided to downloader' + + + def download_bookscorpus(self): + downloader = BooksDownloader(self.save_path) + downloader.download() + + + def download_wikicorpus(self, language): + downloader = WikiDownloader(language, self.save_path) + downloader.download() + + + def download_google_pretrained_weights(self): + downloader = GooglePretrainedWeightDownloader(self.save_path) + downloader.download() + + + def download_nvidia_pretrained_weights(self): + downloader = NVIDIAPretrainedWeightDownloader(self.save_path) + downloader.download() + + + def download_mrpc(self): + downloader = MRPCDownloader(self.save_path) + downloader.download() + + + def download_squad(self): + downloader = SquadDownloader(self.save_path) + downloader.download() diff --git a/PyTorch/LanguageModeling/BERT/data/GooglePretrainedWeightDownloader.py b/PyTorch/LanguageModeling/BERT/data/GooglePretrainedWeightDownloader.py new file mode 100644 index 00000000..ff47bdf2 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/GooglePretrainedWeightDownloader.py @@ -0,0 +1,147 @@ +# NVIDIA + +import hashlib +import os +import urllib.request +import zipfile + +class GooglePretrainedWeightDownloader: + def __init__(self, save_path): + self.save_path = save_path + '/google_pretrained_weights' + + if not os.path.exists(self.save_path): + os.makedirs(self.save_path) + + # 
Download urls + self.model_urls = { + 'bert_base_uncased': ('https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip', 'uncased_L-12_H-768_A-12.zip'), + 'bert_large_uncased': ('https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-24_H-1024_A-16.zip', 'uncased_L-24_H-1024_A-16.zip'), + 'bert_base_cased': ('https://storage.googleapis.com/bert_models/2018_10_18/cased_L-12_H-768_A-12.zip', 'cased_L-12_H-768_A-12.zip'), + 'bert_large_cased': ('https://storage.googleapis.com/bert_models/2018_10_18/cased_L-24_H-1024_A-16.zip', 'cased_L-24_H-1024_A-16.zip'), + 'bert_base_multilingual_cased': ('https://storage.googleapis.com/bert_models/2018_11_23/multi_cased_L-12_H-768_A-12.zip', 'multi_cased_L-12_H-768_A-12.zip'), + 'bert_large_multilingual_uncased': ('https://storage.googleapis.com/bert_models/2018_11_03/multilingual_L-12_H-768_A-12.zip', 'multilingual_L-12_H-768_A-12.zip'), + 'bert_base_chinese': ('https://storage.googleapis.com/bert_models/2018_11_03/chinese_L-12_H-768_A-12.zip', 'chinese_L-12_H-768_A-12.zip') + } + + # SHA256sum verification for file download integrity (and checking for changes from the download source over time) + self.bert_base_uncased_sha = { + 'bert_config.json': '7b4e5f53efbd058c67cda0aacfafb340113ea1b5797d9ce6ee411704ba21fcbc', + 'bert_model.ckpt.data-00000-of-00001': '58580dc5e0bf0ae0d2efd51d0e8272b2f808857f0a43a88aaf7549da6d7a8a84', + 'bert_model.ckpt.index': '04c1323086e2f1c5b7c0759d8d3e484afbb0ab45f51793daab9f647113a0117b', + 'bert_model.ckpt.meta': 'dd5682170a10c3ea0280c2e9b9a45fee894eb62da649bbdea37b38b0ded5f60e', + 'vocab.txt': '07eced375cec144d27c900241f3e339478dec958f92fddbc551f295c992038a3', + } + + self.bert_large_uncased_sha = { + 'bert_config.json': 'bfa42236d269e2aeb3a6d30412a33d15dbe8ea597e2b01dc9518c63cc6efafcb', + 'bert_model.ckpt.data-00000-of-00001': 'bc6b3363e3be458c99ecf64b7f472d2b7c67534fd8f564c0556a678f90f4eea1', + 'bert_model.ckpt.index': '68b52f2205ffc64dc627d1120cf399c1ef1cbc35ea5021d1afc889ffe2ce2093', + 'bert_model.ckpt.meta': '6fcce8ff7628f229a885a593625e3d5ff9687542d5ef128d9beb1b0c05edc4a1', + 'vocab.txt': '07eced375cec144d27c900241f3e339478dec958f92fddbc551f295c992038a3', + } + + self.bert_base_cased_sha = { + 'bert_config.json': 'f11dfb757bea16339a33e1bf327b0aade6e57fd9c29dc6b84f7ddb20682f48bc', + 'bert_model.ckpt.data-00000-of-00001': '734d5a1b68bf98d4e9cb6b6692725d00842a1937af73902e51776905d8f760ea', + 'bert_model.ckpt.index': '517d6ef5c41fc2ca1f595276d6fccf5521810d57f5a74e32616151557790f7b1', + 'bert_model.ckpt.meta': '5f8a9771ff25dadd61582abb4e3a748215a10a6b55947cbb66d0f0ba1694be98', + 'vocab.txt': 'eeaa9875b23b04b4c54ef759d03db9d1ba1554838f8fb26c5d96fa551df93d02', + } + + self.bert_large_cased_sha = { + 'bert_config.json': '7adb2125c8225da495656c982fd1c5f64ba8f20ad020838571a3f8a954c2df57', + 'bert_model.ckpt.data-00000-of-00001': '6ff33640f40d472f7a16af0c17b1179ca9dcc0373155fb05335b6a4dd1657ef0', + 'bert_model.ckpt.index': 'ef42a53f577fbe07381f4161b13c7cab4f4fc3b167cec6a9ae382c53d18049cf', + 'bert_model.ckpt.meta': 'd2ddff3ed33b80091eac95171e94149736ea74eb645e575d942ec4a5e01a40a1', + 'vocab.txt': 'eeaa9875b23b04b4c54ef759d03db9d1ba1554838f8fb26c5d96fa551df93d02', + } + + self.bert_base_multilingual_cased_sha = { + 'bert_config.json': 'e76c3964bc14a8bb37a5530cdc802699d2f4a6fddfab0611e153aa2528f234f0', + 'bert_model.ckpt.data-00000-of-00001': '55b8a2df41f69c60c5180e50a7c31b7cdf6238909390c4ddf05fbc0d37aa1ac5', + 'bert_model.ckpt.index': 
'7d8509c2a62b4e300feb55f8e5f1eef41638f4998dd4d887736f42d4f6a34b37', + 'bert_model.ckpt.meta': '95e5f1997e8831f1c31e5cf530f1a2e99f121e9cd20887f2dce6fe9e3343e3fa', + 'vocab.txt': 'fe0fda7c425b48c516fc8f160d594c8022a0808447475c1a7c6d6479763f310c', + } + + self.bert_large_multilingual_uncased_sha = { + 'bert_config.json': '49063bb061390211d2fdd108cada1ed86faa5f90b80c8f6fdddf406afa4c4624', + 'bert_model.ckpt.data-00000-of-00001': '3cd83912ebeb0efe2abf35c9f1d5a515d8e80295e61c49b75c8853f756658429', + 'bert_model.ckpt.index': '87c372c1a3b1dc7effaaa9103c80a81b3cbab04c7933ced224eec3b8ad2cc8e7', + 'bert_model.ckpt.meta': '27f504f34f02acaa6b0f60d65195ec3e3f9505ac14601c6a32b421d0c8413a29', + 'vocab.txt': '87b44292b452f6c05afa49b2e488e7eedf79ea4f4c39db6f2f4b37764228ef3f', + } + + self.bert_base_chinese_sha = { + 'bert_config.json': '7aaad0335058e2640bcb2c2e9a932b1cd9da200c46ea7b8957d54431f201c015', + 'bert_model.ckpt.data-00000-of-00001': '756699356b78ad0ef1ca9ba6528297bcb3dd1aef5feadd31f4775d7c7fc989ba', + 'bert_model.ckpt.index': '46315546e05ce62327b3e2cd1bed22836adcb2ff29735ec87721396edb21b82e', + 'bert_model.ckpt.meta': 'c0f8d51e1ab986604bc2b25d6ec0af7fd21ff94cf67081996ec3f3bf5d823047', + 'vocab.txt': '45bbac6b341c319adc98a532532882e91a9cefc0329aa57bac9ae761c27b291c', + } + + # Relate SHA to urls for loop below + self.model_sha = { + 'bert_base_uncased': self.bert_base_uncased_sha, + 'bert_large_uncased': self.bert_large_uncased_sha, + 'bert_base_cased': self.bert_base_cased_sha, + 'bert_large_cased': self.bert_large_cased_sha, + 'bert_base_multilingual_cased': self.bert_base_multilingual_cased_sha, + 'bert_large_multilingual_uncased': self.bert_large_multilingual_uncased_sha, + 'bert_base_chinese': self.bert_base_chinese_sha + } + + # Helper to get sha256sum of a file + def sha256sum(self, filename): + h = hashlib.sha256() + b = bytearray(128*1024) + mv = memoryview(b) + with open(filename, 'rb', buffering=0) as f: + for n in iter(lambda : f.readinto(mv), 0): + h.update(mv[:n]) + + return h.hexdigest() + + def download(self): + # Iterate over urls: download, unzip, verify sha256sum + found_mismatch_sha = False + for model in self.model_urls: + url = self.model_urls[model][0] + file = self.save_path + '/' + self.model_urls[model][1] + + print('Downloading', url) + response = urllib.request.urlopen(url) + with open(file, 'wb') as handle: + handle.write(response.read()) + + print('Unzipping', file) + zip = zipfile.ZipFile(file, 'r') + zip.extractall(self.save_path) + zip.close() + + sha_dict = self.model_sha[model] + for extracted_file in sha_dict: + sha = sha_dict[extracted_file] + if sha != self.sha256sum(file[:-4] + '/' + extracted_file): + found_mismatch_sha = True + print('SHA256sum does not match on file:', extracted_file, 'from download url:', url) + else: + print(file[:-4] + '/' + extracted_file, '\t', 'verified') + + if not found_mismatch_sha: + print("All downloads pass sha256sum verification.") + + def serialize(self): + pass + + def deserialize(self): + pass + + def listAvailableWeights(self): + print("Available Weight Datasets") + for item in self.model_urls: + print(item) + + def listLocallyStoredWeights(self): + pass + diff --git a/PyTorch/LanguageModeling/BERT/data/MRPCDownloader.py b/PyTorch/LanguageModeling/BERT/data/MRPCDownloader.py new file mode 100644 index 00000000..ad85779f --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/MRPCDownloader.py @@ -0,0 +1,33 @@ +# NVIDIA + +import bz2 +import os +import urllib.request +import sys + +class MRPCDownloader: + def __init__(self, 
save_path): + self.save_path = save_path + '/mrpc' + + if not os.path.exists(self.save_path): + os.makedirs(self.save_path) + + # Documentation - Download link obtained from here: https://github.com/nyu-mll/GLUE-baselines/blob/master/download_glue_data.py + self.download_urls = { + 'https://firebasestorage.googleapis.com/v0/b/mtl-sentence-representations.appspot.com/o/data%2Fmrpc_dev_ids.tsv?alt=media&token=ec5c0836-31d5-48f4-b431-7480817f1adc' : 'mrpc_dev_ids.tsv' + } + + def download(self): + for item in self.download_urls: + url = item + file = self.download_urls[item] + + print('Downloading:', url) + if os.path.isfile(self.save_path + '/' + file): + print('** Download file already exists, skipping download') + else: + response = urllib.request.urlopen(url) + with open(self.save_path + '/' + file, "wb") as handle: + handle.write(response.read()) + + diff --git a/PyTorch/LanguageModeling/BERT/data/NVIDIAPretrainedWeightDownloader.py b/PyTorch/LanguageModeling/BERT/data/NVIDIAPretrainedWeightDownloader.py new file mode 100644 index 00000000..d8749f60 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/NVIDIAPretrainedWeightDownloader.py @@ -0,0 +1,16 @@ +# NVIDIA + +import os + +class NVIDIAPretrainedWeightDownloader: + def __init__(self, save_path): + self.save_path = save_path + '/nvidia_pretrained_weights' + + if not os.path.exists(self.save_path): + os.makedirs(self.save_path) + + pass + + + def download(self): + assert False, 'NVIDIAPretrainedWeightDownloader not implemented yet.' \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/data/SquadDownloader.py b/PyTorch/LanguageModeling/BERT/data/SquadDownloader.py new file mode 100644 index 00000000..7a250cb9 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/SquadDownloader.py @@ -0,0 +1,43 @@ +# NVIDIA + +import bz2 +import os +import urllib.request +import sys + +class SquadDownloader: + def __init__(self, save_path): + self.save_path = save_path + '/squad' + + if not os.path.exists(self.save_path): + os.makedirs(self.save_path) + + if not os.path.exists(self.save_path + '/v1.1'): + os.makedirs(self.save_path + '/v1.1') + + if not os.path.exists(self.save_path + '/v2.0'): + os.makedirs(self.save_path + '/v2.0') + + self.download_urls = { + 'https://rajpurkar.github.io/SQuAD-explorer/dataset/train-v1.1.json' : 'v1.1/train-v1.1.json', + 'https://rajpurkar.github.io/SQuAD-explorer/dataset/dev-v1.1.json' : 'v1.1/dev-v1.1.json', + 'https://worksheets.codalab.org/rest/bundles/0xbcd57bee090b421c982906709c8c27e1/contents/blob/' : 'v1.1/evaluate-v1.1.py', + 'https://rajpurkar.github.io/SQuAD-explorer/dataset/train-v2.0.json' : 'v2.0/train-v2.0.json', + 'https://rajpurkar.github.io/SQuAD-explorer/dataset/dev-v2.0.json' : 'v2.0/dev-v2.0.json', + 'https://worksheets.codalab.org/rest/bundles/0x6b567e1cf2e041ec80d7098f031c5c9e/contents/blob/' : 'v2.0/evaluate-v2.0.py', + } + + def download(self): + for item in self.download_urls: + url = item + file = self.download_urls[item] + + print('Downloading:', url) + if os.path.isfile(self.save_path + '/' + file): + print('** Download file already exists, skipping download') + else: + response = urllib.request.urlopen(url) + with open(self.save_path + '/' + file, "wb") as handle: + handle.write(response.read()) + + diff --git a/PyTorch/LanguageModeling/BERT/data/TextSharding.py b/PyTorch/LanguageModeling/BERT/data/TextSharding.py new file mode 100644 index 00000000..5b25903b --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/TextSharding.py @@ -0,0 +1,316 @@ +# NVIDIA + +from 
collections import defaultdict +from itertools import islice + +import multiprocessing +import statistics + +class Sharding: + def __init__(self, input_files, output_name_prefix, n_training_shards, n_test_shards, fraction_test_set): + assert len(input_files) > 0, 'The input file list must contain at least one file.' + assert n_training_shards > 0, 'There must be at least one output shard.' + assert n_test_shards > 0, 'There must be at least one output shard.' + + self.n_training_shards = n_training_shards + self.n_test_shards = n_test_shards + self.fraction_test_set = fraction_test_set + + self.input_files = input_files + + self.output_name_prefix = output_name_prefix + self.output_training_identifier = '_training' + self.output_test_identifier = '_test' + self.output_file_extension = '.txt' + + self.articles = {} # key: integer identifier, value: list of articles + self.sentences = {} # key: integer identifier, value: list of sentences + self.output_training_files = {} # key: filename, value: list of articles to go into file + self.output_test_files = {} # key: filename, value: list of articles to go into file + + self.init_output_files() + + + # Remember, the input files contain one article per line (the whitespace check is to skip extraneous blank lines) + def load_articles(self): + print('Start: Loading Articles') + + global_article_count = 0 + for input_file in self.input_files: + print('input file:', input_file) + with open(input_file, mode='r', newline='\n') as f: + for i, line in enumerate(f): + if line.strip(): + self.articles[global_article_count] = line.rstrip() + global_article_count += 1 + + print('End: Loading Articles: There are', len(self.articles), 'articles.') + + + def segment_articles_into_sentences(self, segmenter): + print('Start: Sentence Segmentation') + if len(self.articles) == 0: + self.load_articles() + + assert len(self.articles) != 0, 'Please check that input files are present and contain data.' + + # TODO: WIP: multiprocessing (create independent ranges and spawn processes) + use_multiprocessing = 'serial' + + def chunks(data, size=len(self.articles)): + it = iter(data) + for i in range(0, len(data), size): + yield {k: data[k] for k in islice(it, size)} + + if use_multiprocessing == 'manager': + manager = multiprocessing.Manager() + return_dict = manager.dict() + jobs = [] + n_processes = 7 # in addition to the main process, total = n_proc+1 + + def work(articles, return_dict): + sentences = {} + for i, article in enumerate(articles): + sentences[i] = segmenter.segment_string(articles[article]) + + if i % 5000 == 0: + print('Segmenting article', i) + + return_dict.update(sentences) + + for item in chunks(self.articles, len(self.articles)): + p = multiprocessing.Process(target=work, args=(item, return_dict)) + + # Busy wait + while len(jobs) >= n_processes: + pass + + jobs.append(p) + p.start() + + for proc in jobs: + proc.join() + + elif use_multiprocessing == 'queue': + work_queue = multiprocessing.Queue() + jobs = [] + + for item in chunks(self.articles, len(self.articles)): + pass + + else: # serial option + for i, article in enumerate(self.articles): + self.sentences[i] = segmenter.segment_string(self.articles[article]) + + if i % 5000 == 0: + print('Segmenting article', i) + + print('End: Sentence Segmentation') + + + def init_output_files(self): + print('Start: Init Output Files') + assert len(self.output_training_files) == 0, 'Internal storage self.output_training_files already contains data. This function is intended to be used by the constructor only.'
+ assert len(self.output_test_files) == 0, 'Internal storage self.output_test_files already contains data. This function is intended to be used by the constructor only.' + + for i in range(self.n_training_shards): + name = self.output_name_prefix + self.output_training_identifier + '_' + str(i) + self.output_file_extension + self.output_training_files[name] = [] + + for i in range(self.n_test_shards): + name = self.output_name_prefix + self.output_test_identifier + '_' + str(i) + self.output_file_extension + self.output_test_files[name] = [] + + print('End: Init Output Files') + + + def get_sentences_per_shard(self, shard): + result = 0 + for article_id in shard: + result += len(self.sentences[article_id]) + + return result + + + def distribute_articles_over_shards(self): + print('Start: Distribute Articles Over Shards') + assert len(self.articles) >= self.n_training_shards + self.n_test_shards, 'There are fewer articles than shards. Please add more data or reduce the number of shards requested.' + + # Create dictionary with - key: sentence count per article, value: article id number + sentence_counts = defaultdict(lambda: []) + + max_sentences = 0 + total_sentences = 0 + + for article_id in self.sentences: + current_length = len(self.sentences[article_id]) + sentence_counts[current_length].append(article_id) + max_sentences = max(max_sentences, current_length) + total_sentences += current_length + + n_sentences_assigned_to_training = int((1 - self.fraction_test_set) * total_sentences) + nominal_sentences_per_training_shard = n_sentences_assigned_to_training // self.n_training_shards + nominal_sentences_per_test_shard = (total_sentences - n_sentences_assigned_to_training) // self.n_test_shards + + consumed_article_set = set() + unused_article_set = set(self.articles.keys()) + + # Make first pass and add one article worth of lines per file + for file in self.output_training_files: + current_article_id = sentence_counts[max_sentences][-1] + sentence_counts[max_sentences].pop(-1) + self.output_training_files[file].append(current_article_id) + consumed_article_set.add(current_article_id) + unused_article_set.remove(current_article_id) + + # Maintain the max sentence count + while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0: + max_sentences -= 1 + + if len(self.sentences[current_article_id]) > nominal_sentences_per_training_shard: + nominal_sentences_per_training_shard = len(self.sentences[current_article_id]) + print('Warning: A single article contains more than the nominal number of sentences per training shard.') + + for file in self.output_test_files: + current_article_id = sentence_counts[max_sentences][-1] + sentence_counts[max_sentences].pop(-1) + self.output_test_files[file].append(current_article_id) + consumed_article_set.add(current_article_id) + unused_article_set.remove(current_article_id) + + # Maintain the max sentence count + while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0: + max_sentences -= 1 + + if len(self.sentences[current_article_id]) > nominal_sentences_per_test_shard: + nominal_sentences_per_test_shard = len(self.sentences[current_article_id]) + print('Warning: A single article contains more than the nominal number of sentences per test shard.') + + training_counts = [] + test_counts = [] + + for shard in self.output_training_files: + training_counts.append(self.get_sentences_per_shard(self.output_training_files[shard])) + + for shard in self.output_test_files: + 
test_counts.append(self.get_sentences_per_shard(self.output_test_files[shard])) + + training_median = statistics.median(training_counts) + test_median = statistics.median(test_counts) + + # Make subsequent passes over files to find articles to add without going over limit + history_remaining = [] + n_history_remaining = 4 + + while len(consumed_article_set) < len(self.articles): + for fidx, file in enumerate(self.output_training_files): + nominal_next_article_size = min(nominal_sentences_per_training_shard - training_counts[fidx], max_sentences) + + # Maintain the max sentence count + while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0: + max_sentences -= 1 + + while len(sentence_counts[nominal_next_article_size]) == 0 and nominal_next_article_size > 0: + nominal_next_article_size -= 1 + + if nominal_next_article_size not in sentence_counts or nominal_next_article_size == 0 or training_counts[fidx] > training_median: + continue # skip adding to this file, will come back later if no file can accept unused articles + + current_article_id = sentence_counts[nominal_next_article_size][-1] + sentence_counts[nominal_next_article_size].pop(-1) + + self.output_training_files[file].append(current_article_id) + consumed_article_set.add(current_article_id) + unused_article_set.remove(current_article_id) + + for fidx, file in enumerate(self.output_test_files): + nominal_next_article_size = min(nominal_sentences_per_test_shard - test_counts[fidx], max_sentences) + + # Maintain the max sentence count + while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0: + max_sentences -= 1 + + while len(sentence_counts[nominal_next_article_size]) == 0 and nominal_next_article_size > 0: + nominal_next_article_size -= 1 + + if nominal_next_article_size not in sentence_counts or nominal_next_article_size == 0 or test_counts[fidx] > test_median: + continue # skip adding to this file, will come back later if no file can accept unused articles + + current_article_id = sentence_counts[nominal_next_article_size][-1] + sentence_counts[nominal_next_article_size].pop(-1) + + self.output_test_files[file].append(current_article_id) + consumed_article_set.add(current_article_id) + unused_article_set.remove(current_article_id) + + # If unable to place articles a few times, bump up nominal sizes by fraction until articles get placed + if len(history_remaining) == n_history_remaining: + history_remaining.pop(0) + history_remaining.append(len(unused_article_set)) + + history_same = True + for i in range(1, len(history_remaining)): + history_same = history_same and (history_remaining[i-1] == history_remaining[i]) + + if history_same: + nominal_sentences_per_training_shard += 1 + # nominal_sentences_per_test_shard += 1 + + training_counts = [] + test_counts = [] + for shard in self.output_training_files: + training_counts.append(self.get_sentences_per_shard(self.output_training_files[shard])) + + for shard in self.output_test_files: + test_counts.append(self.get_sentences_per_shard(self.output_test_files[shard])) + + training_median = statistics.median(training_counts) + test_median = statistics.median(test_counts) + + print('Distributing data over shards:', len(unused_article_set), 'articles remaining.') + + + if len(unused_article_set) != 0: + print('Warning: Some articles did not make it into output files.') + + + for shard in self.output_training_files: + print('Training shard:', self.get_sentences_per_shard(self.output_training_files[shard])) + + for shard in self.output_test_files: + print('Test 
shard:', self.get_sentences_per_shard(self.output_test_files[shard])) + + print('End: Distribute Articles Over Shards') + + + def write_shards_to_disk(self): + print('Start: Write Shards to Disk') + for shard in self.output_training_files: + self.write_single_shard(shard, self.output_training_files[shard]) + + for shard in self.output_test_files: + self.write_single_shard(shard, self.output_test_files[shard]) + + print('End: Write Shards to Disk') + + + def write_single_shard(self, shard_name, shard): + with open(shard_name, mode='w', newline='\n') as f: + for article_id in shard: + for line in self.sentences[article_id]: + f.write(line + '\n') + + f.write('\n') # Line break between articles + + +import nltk + +nltk.download('punkt') + +class NLTKSegmenter: + def __init__(self): + pass + + def segment_string(self, article): + return nltk.tokenize.sent_tokenize(article) + diff --git a/PyTorch/LanguageModeling/BERT/data/WikiDownloader.py b/PyTorch/LanguageModeling/BERT/data/WikiDownloader.py new file mode 100644 index 00000000..0fc19269 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/WikiDownloader.py @@ -0,0 +1,58 @@ +# NVIDIA + +import bz2 +import os +import urllib.request +import sys + +class WikiDownloader: + def __init__(self, language, save_path): + self.save_path = save_path + '/wikicorpus_' + language + + if not os.path.exists(self.save_path): + os.makedirs(self.save_path) + + self.language = language + self.download_urls = { + 'en' : 'https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-pages-articles.xml.bz2', + 'zh' : 'https://dumps.wikimedia.org/zhwiki/latest/zhwiki-latest-pages-articles.xml.bz2' + } + + self.output_files = { + 'en' : 'wikicorpus_en.xml.bz2', + 'zh' : 'wikicorpus_zh.xml.bz2' + } + + + def download(self): + if self.language in self.download_urls: + url = self.download_urls[self.language] + file = self.output_files[self.language] + + print('Downloading:', url) + if os.path.isfile(self.save_path + '/' + file): + print('** Download file already exists, skipping download') + else: + response = urllib.request.urlopen(url) + with open(self.save_path + '/' + file, "wb") as handle: + handle.write(response.read()) + + # Always unzipping since this is relatively fast and will overwrite + print('Unzipping:', self.output_files[self.language]) + #with open(self.save_path + '/' + file, mode='rb', buffering=131072) as f: + # it = iter(lambda: f.read(131072), b'') + # self.decompression(it, sys.stdout.buffer) + + # Use context managers so both handles are closed deterministically; + # note this decompresses the whole dump into memory in one read + with bz2.BZ2File(self.save_path + '/' + file) as compressed, open(self.save_path + '/wikicorpus_' + self.language + '.xml', mode='wb', buffering=131072) as uncompressed: + uncompressed.write(compressed.read()) + + else: + assert False, 'WikiDownloader not implemented for this language yet.' 
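+ # decompression() below is a streaming alternative to the in-memory read above; its only call site is commented out, so it is currently unused and kept for reference.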
+ + def decompression(self, input, output): + decomp = bz2.BZ2Decompressor() + + for chunk in input: + dc = decomp.decompress(chunk) + output.write(dc) + diff --git a/PyTorch/LanguageModeling/BERT/data/WikicorpusTextFormatting.py b/PyTorch/LanguageModeling/BERT/data/WikicorpusTextFormatting.py new file mode 100644 index 00000000..f0c6f68f --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/WikicorpusTextFormatting.py @@ -0,0 +1,35 @@ +# NVIDIA + +import glob +import os + +class WikicorpusTextFormatting: + def __init__(self, wiki_path, output_filename, recursive = False): + self.wiki_path = wiki_path + self.recursive = recursive + self.output_filename = output_filename + + + # This puts one article per line + def merge(self): + with open(self.output_filename, mode='w', newline='\n') as ofile: + for dirname in glob.glob(self.wiki_path + '/*/', recursive=False): + for filename in glob.glob(dirname + 'wiki_*', recursive=self.recursive): + print(filename) + article_lines = [] + article_open = False + + with open(filename, mode='r', newline='\n') as file: + for line in file: + if '<doc id=' in line: + article_open = True + elif '</doc>' in line: + article_open = False + for oline in article_lines[1:]: + if oline != '\n': + ofile.write(oline.rstrip() + " ") + ofile.write("\n\n") + article_lines = [] + else: + if article_open: + article_lines.append(line) \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/data/__init__.py b/PyTorch/LanguageModeling/BERT/data/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/PyTorch/LanguageModeling/BERT/data/bertPrep.py b/PyTorch/LanguageModeling/BERT/data/bertPrep.py new file mode 100644 index 00000000..109b7f97 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/data/bertPrep.py @@ -0,0 +1,345 @@ +# NVIDIA + +import BookscorpusTextFormatting +import Downloader +import TextSharding +import WikicorpusTextFormatting + +import argparse +import itertools +import multiprocessing +import os +import pprint +import subprocess + + +def main(args): + working_dir = os.environ['BERT_PREP_WORKING_DIR'] + + print('Working Directory:', working_dir) + print('Action:', args.action) + print('Dataset Name:', args.dataset) + + if args.input_files: + args.input_files = args.input_files.split(',') + + directory_structure = { + 'download' : working_dir + '/download', # Downloaded and decompressed + 'extracted' : working_dir + '/extracted', # Extracted from whatever the initial format is (e.g., wikiextractor) + 'formatted' : working_dir + '/formatted_one_article_per_line', # This is the level where all sources should look the same + 'sharded' : working_dir + '/sharded', + 'tfrecord' : working_dir + '/tfrecord', + 'hdf5': working_dir + '/hdf5' + } + + print('\nDirectory Structure:') + pp = pprint.PrettyPrinter(indent=2) + pp.pprint(directory_structure) + print('') + + if args.action == 'download': + if not os.path.exists(directory_structure['download']): + os.makedirs(directory_structure['download']) + + downloader = Downloader.Downloader(args.dataset, directory_structure['download']) + downloader.download() + + elif args.action == 'text_formatting': + assert args.dataset != 'google_pretrained_weights' and args.dataset != 'nvidia_pretrained_weights' and args.dataset != 'squad' and args.dataset != 'mrpc', 'Cannot perform text_formatting on pretrained weights or fine-tuning data' + + if not os.path.exists(directory_structure['extracted']): + os.makedirs(directory_structure['extracted']) + + if not os.path.exists(directory_structure['formatted']): + os.makedirs(directory_structure['formatted']) + + if args.dataset == 'bookscorpus': + books_path = directory_structure['download'] + '/bookscorpus' + #books_path = directory_structure['download'] + output_filename = directory_structure['formatted'] + '/bookscorpus_one_book_per_line.txt' + books_formatter = BookscorpusTextFormatting.BookscorpusTextFormatting(books_path, output_filename, recursive=True) + books_formatter.merge() + + elif args.dataset == 'wikicorpus_en': + if args.skip_wikiextractor == 0: + path_to_wikiextractor_in_container = '/workspace/wikiextractor/WikiExtractor.py' + wikiextractor_command = path_to_wikiextractor_in_container + ' ' + directory_structure['download'] + '/' + args.dataset + '/wikicorpus_en.xml ' + '-b 100M --processes ' + str(args.n_processes) + ' -o ' + directory_structure['extracted'] + '/' + args.dataset + print('WikiExtractor Command:', wikiextractor_command) + wikiextractor_process = subprocess.run(wikiextractor_command, shell=True, check=True) + + # directory_structure['extracted'] already includes working_dir; prepending it again produced a duplicated path + wiki_path = directory_structure['extracted'] + '/wikicorpus_en' + output_filename = directory_structure['formatted'] + '/wikicorpus_en_one_article_per_line.txt' + wiki_formatter = WikicorpusTextFormatting.WikicorpusTextFormatting(wiki_path, output_filename, recursive=True) + wiki_formatter.merge() + + elif args.dataset == 'wikicorpus_zh': + assert False, 'wikicorpus_zh not fully supported at this time. The simplified/traditional Chinese data needs to be translated and properly segmented still, and should work once this step is added.' + if args.skip_wikiextractor == 0: + path_to_wikiextractor_in_container = '/workspace/wikiextractor/WikiExtractor.py' + wikiextractor_command = path_to_wikiextractor_in_container + ' ' + directory_structure['download'] + '/' + args.dataset + '/wikicorpus_zh.xml ' + '-b 100M --processes ' + str(args.n_processes) + ' -o ' + directory_structure['extracted'] + '/' + args.dataset + print('WikiExtractor Command:', wikiextractor_command) + wikiextractor_process = subprocess.run(wikiextractor_command, shell=True, check=True) + + wiki_path = directory_structure['extracted'] + '/wikicorpus_zh' + output_filename = directory_structure['formatted'] + '/wikicorpus_zh_one_article_per_line.txt' + wiki_formatter = WikicorpusTextFormatting.WikicorpusTextFormatting(wiki_path, output_filename, recursive=True) + wiki_formatter.merge() + + elif args.action == 'sharding': + # Note: books+wiki requires user to provide list of input_files (comma-separated with no spaces) + if args.dataset == 'bookscorpus' or 'wikicorpus' in args.dataset or 'books_wiki' in args.dataset: + if args.input_files is None: + if args.dataset == 'bookscorpus': + args.input_files = [directory_structure['formatted'] + '/bookscorpus_one_book_per_line.txt'] + elif args.dataset == 'wikicorpus_en': + args.input_files = [directory_structure['formatted'] + '/wikicorpus_en_one_article_per_line.txt'] + elif args.dataset == 'wikicorpus_zh': + args.input_files = [directory_structure['formatted'] + '/wikicorpus_zh_one_article_per_line.txt'] + elif args.dataset == 'books_wiki_en_corpus': + args.input_files = [directory_structure['formatted'] + '/bookscorpus_one_book_per_line.txt', directory_structure['formatted'] + '/wikicorpus_en_one_article_per_line.txt'] + + if args.output_file_prefix is None: + args.output_file_prefix = directory_structure['sharded'] + '/' + args.dataset + '/' + args.dataset + + if not os.path.exists(directory_structure['sharded']): + 
os.makedirs(directory_structure['sharded']) + + if not os.path.exists(directory_structure['sharded'] + '/' + args.dataset): + os.makedirs(directory_structure['sharded'] + '/' + args.dataset) + + # Segmentation is here because all datasets look the same in one article/book/whatever per line format, and + # it seemed unnecessarily complicated to add an additional preprocessing step to call just for this. + # Different languages (e.g., Chinese simplified/traditional) may require translation and + # other packages to be called from here -- just add a conditional branch for those extra steps + segmenter = TextSharding.NLTKSegmenter() + sharding = TextSharding.Sharding(args.input_files, args.output_file_prefix, args.n_training_shards, args.n_test_shards, args.fraction_test_set) + + sharding.load_articles() + sharding.segment_articles_into_sentences(segmenter) + sharding.distribute_articles_over_shards() + sharding.write_shards_to_disk() + + else: + assert False, 'Unsupported dataset for sharding' + + elif args.action == 'create_tfrecord_files': + assert False, 'TFrecord creation not supported in this PyTorch model example release.' \ + '' + if not os.path.exists(directory_structure['tfrecord']): + os.makedirs(directory_structure['tfrecord']) + + def create_record_worker(filename_prefix, shard_id, output_format='tfrecord'): + bert_preprocessing_command = 'python /workspace/bert/create_pretraining_data.py' + bert_preprocessing_command += ' --input_file=' + directory_structure['sharded'] + '/' + args.dataset + '/' + filename_prefix + '_' + str(shard_id) + '.txt' + bert_preprocessing_command += ' --output_file=' + directory_structure['tfrecord'] + '/' + args.dataset + '/' + filename_prefix + '_' + str(shard_id) + '.' + output_format + bert_preprocessing_command += ' --vocab_file=' + args.vocab_file + bert_preprocessing_command += ' --do_lower_case=' + 'true' if args.do_lower_case else 'false' + bert_preprocessing_command += ' --max_seq_length=' + str(args.max_seq_length) + bert_preprocessing_command += ' --max_predictions_per_seq=' + str(args.max_predictions_per_seq) + bert_preprocessing_command += ' --masked_lm_prob=' + str(args.masked_lm_prob) + bert_preprocessing_command += ' --random_seed=' + str(args.random_seed) + bert_preprocessing_command += ' --dupe_factor=' + str(args.dupe_factor) + bert_preprocessing_process = subprocess.Popen(bert_preprocessing_command, shell=True) + bert_preprocessing_process.communicate() + + last_process = bert_preprocessing_process + + # This could be better optimized (fine if all take equal time) + if shard_id % args.n_processes == 0 and shard_id > 0: + bert_preprocessing_process.wait() + + for i in range(args.n_training_shards): + create_record_worker(args.output_file_prefix + '_training', i) + + last_process.wait() + + for i in range(args.n_test_shards): + create_record_worker(args.output_file_prefix + '_test', i) + + last_process.wait() + + + elif args.action == 'create_hdf5_files': + last_process = None + + def create_record_worker(filename_prefix, shard_id, output_format='hdf5'): + bert_preprocessing_command = 'python /workspace/bert/create_pretraining_data.py' + bert_preprocessing_command += ' --input_file=' + directory_structure['sharded'] + '/' + args.dataset + '/' + filename_prefix + '_' + str(shard_id) + '.txt' + bert_preprocessing_command += ' --output_file=' + directory_structure['tfrecord'] + '/' + args.dataset + '/' + filename_prefix + '_' + str(shard_id) + '.' 
+ output_format + bert_preprocessing_command += ' --vocab_file=' + args.vocab_file + bert_preprocessing_command += ' --do_lower_case' if args.do_lower_case else '' + bert_preprocessing_command += ' --max_seq_length=' + str(args.max_seq_length) + bert_preprocessing_command += ' --max_predictions_per_seq=' + str(args.max_predictions_per_seq) + bert_preprocessing_command += ' --masked_lm_prob=' + str(args.masked_lm_prob) + bert_preprocessing_command += ' --random_seed=' + str(args.random_seed) + bert_preprocessing_command += ' --dupe_factor=' + str(args.dupe_factor) + bert_preprocessing_process = subprocess.Popen(bert_preprocessing_command, shell=True) + bert_preprocessing_process.communicate() + + last_process = bert_preprocessing_process + + # This could be better optimized (fine if all shards take equal time); see the pool-based sketch below + if shard_id % args.n_processes == 0 and shard_id > 0: + bert_preprocessing_process.wait() + + for i in range(args.n_training_shards): + create_record_worker(args.output_file_prefix + '_training', i) + + last_process.wait() + + for i in range(args.n_test_shards): + create_record_worker(args.output_file_prefix + '_test', i) + + last_process.wait() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description='Preprocessing Application for Everything BERT-related' + ) + + parser.add_argument( + '--action', + type=str, + help='Specify the action you want the app to take. e.g., generate vocab, segment, create tfrecords', + choices={ + 'download', # Download and verify md5/sha sums + 'text_formatting', # Convert into a file that contains one article/book per line + 'sharding', # Convert previously formatted text into shards containing one sentence per line + 'create_tfrecord_files', # Turn each shard into a TFRecord with masking and next sentence prediction info + 'create_hdf5_files' # Turn each shard into an HDF5 file with masking and next sentence prediction info + } + ) + + parser.add_argument( + '--dataset', + type=str, + help='Specify the dataset to perform --action on', + choices={ + 'bookscorpus', + 'wikicorpus_en', + 'wikicorpus_zh', + 'books_wiki_en_corpus', + 'google_pretrained_weights', + 'nvidia_pretrained_weights', + 'mrpc', + 'squad', + 'all' + } + ) + + parser.add_argument( + '--input_files', + type=str, + help='Specify the input files in a comma-separated list (no spaces)' + ) + + parser.add_argument( + '--output_file_prefix', + type=str, + help='Specify the naming convention (prefix) of the output files' + ) + + parser.add_argument( + '--n_training_shards', + type=int, + help='Specify the number of training shards to generate', + default=256 + ) + + parser.add_argument( + '--n_test_shards', + type=int, + help='Specify the number of test shards to generate', + default=256 + ) + + parser.add_argument( + '--fraction_test_set', + type=float, + help='Specify the fraction (0..1) of the data to withhold for the test data split (based on number of sequences)', + default=0.2 + ) + + parser.add_argument( + '--segmentation_method', + type=str, + help='Specify your choice of sentence segmentation', + choices={ + 'nltk' + }, + default='nltk' + ) + + parser.add_argument( + '--n_processes', + type=int, + help='Specify the max number of processes to allow at one time', + default=4 + ) + + parser.add_argument( + '--random_seed', + type=int, + help='Specify the base seed to use for any random number generation', + default=12345 + ) + + parser.add_argument( + '--dupe_factor', + type=int, + help='Specify the duplication factor', + default=5 + ) + + parser.add_argument( + '--masked_lm_prob', + type=float,
+ help='Specify the probability for masked LM', + default=0.15 + ) + + parser.add_argument( + '--max_seq_length', + type=int, + help='Specify the maximum sequence length', + default=512 + ) + + parser.add_argument( + '--max_predictions_per_seq', + type=int, + help='Specify the maximum number of masked words per sequence', + default=20 + ) + + parser.add_argument( + '--do_lower_case', + type=int, + help='Specify whether it is cased (0) or uncased (1) (any number greater than 0 will be treated as uncased)', + default=1 + ) + + parser.add_argument( + '--vocab_file', + type=str, + help='Specify absolute path to the vocab file to use' + ) + + parser.add_argument( + '--skip_wikiextractor', + type=int, + help='Specify whether to skip the wikiextractor step: 0=False, 1=True', + default=0 + ) + + parser.add_argument( + '--interactive_json_config_generator', + type=str, + help='Specify an interactive JSON config generator (optional)' + ) + + args = parser.parse_args() + main(args) diff --git a/PyTorch/LanguageModeling/BERT/data/bookcorpus/clean_and_merge_text.py b/PyTorch/LanguageModeling/BERT/data/bookcorpus/clean_and_merge_text.py deleted file mode 100644 index c32540c3..00000000 --- a/PyTorch/LanguageModeling/BERT/data/bookcorpus/clean_and_merge_text.py +++ /dev/null @@ -1,23 +0,0 @@ -# NVIDIA - -import glob -import os -import argparse - -parser = argparse.ArgumentParser(description='Cleaning and merge downloaded bookcorpus files') - -parser.add_argument('download_path', type=str) -parser.add_argument('output_file', type=str) - -args = parser.parse_args() - -download_path = args.download_path -output_file = args.output_file - -with open(output_file, "w") as ofile: - for filename in glob.glob('{}/*.txt'.format(download_path), recursive=True): - with open(filename, mode='r', encoding="utf-8-sig") as file: - for line in file: - if line.strip() != "": - ofile.write(line.strip() + " ") - ofile.write("\n\n") diff --git a/PyTorch/LanguageModeling/BERT/data/bookcorpus/download_bookcorpus.sh b/PyTorch/LanguageModeling/BERT/data/bookcorpus/download_bookcorpus.sh deleted file mode 100755 index 2a898976..00000000 --- a/PyTorch/LanguageModeling/BERT/data/bookcorpus/download_bookcorpus.sh +++ /dev/null @@ -1,9 +0,0 @@ -#! /bin/bash - -# Download books -mkdir -p ./download -python3 /workspace/bookcorpus/download_files.py --list /workspace/bookcorpus/url_list.jsonl --out ./download --trash-bad-count - -# Clean and prep (one book per line) -python3 ./clean_and_merge_text.py ./download bookcorpus.txt - diff --git a/PyTorch/LanguageModeling/BERT/data/create_datasets_from_start.sh b/PyTorch/LanguageModeling/BERT/data/create_datasets_from_start.sh index 9e562468..6f63ac39 100755 --- a/PyTorch/LanguageModeling/BERT/data/create_datasets_from_start.sh +++ b/PyTorch/LanguageModeling/BERT/data/create_datasets_from_start.sh @@ -1,38 +1,27 @@ #!/bin/bash -# Note: There are several directories created to make it clear what has been performed at each stage of preprocessing. The intermediate files may be useful if you want to further clean/prepare/augment the data for your own applications. -# NLTK was chosen as the default over spaCy simply due to speed of sentence segmentation on the large files.
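For comparison, the create_record_worker loop in bertPrep.py above throttles concurrency by waiting after every args.n_processes spawned subprocesses, which stalls on the slowest shard of each batch. The same throttling can be expressed with a process pool; the following is an illustrative sketch only (the helper names are hypothetical, not part of this patch):

    from concurrent.futures import ProcessPoolExecutor
    import subprocess

    def run_shard(command):
        # Each command shells out to create_pretraining_data.py, exactly as above.
        subprocess.run(command, shell=True, check=True)

    def convert_shards(commands, n_processes=4):
        # Keeps at most n_processes conversions in flight, without the
        # lock-step wait after every n_processes shards.
        with ProcessPoolExecutor(max_workers=n_processes) as pool:
            list(pool.map(run_shard, commands))

ProcessPoolExecutor is in the standard library, and run_pretraining.py in this same patch already imports it.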
+# Download +python3 /workspace/bert/data/bertPrep.py --action download --dataset bookscorpus +python3 /workspace/bert/data/bertPrep.py --action download --dataset wikicorpus_en -MERGED_DIR=$1 -args="${*:2}" +python3 /workspace/bert/data/bertPrep.py --action download --dataset google_pretrained_weights # Includes vocab -source utils/config.sh +python3 /workspace/bert/data/bertPrep.py --action download --dataset squad +#python3 /workspace/bert/data/bertPrep.py --action download --dataset mrpc -mkdir -p ${MERGED_DIR} -corpus_file=${MERGED_DIR}/corpus.txt -## Shuffle the full corpus texts -if [ ! -z $3 ] -then - echo "Merging $args" - cat $args | sed "/^$/d" | shuf > $corpus_file -else - corpus_file=$2 -fi +# Properly format the text files +python3 /workspace/bert/data/bertPrep.py --action text_formatting --dataset bookscorpus +python3 /workspace/bert/data/bertPrep.py --action text_formatting --dataset wikicorpus_en -# Split articles into one-sentence-per-line format for use with BERT scripts -echo "Applying sentence segmentation to get one sentence per line" -mkdir -p ${MERGED_DIR}/final_text_file_single -python3 utils/sentence_segmentation_nltk.py $corpus_file ${MERGED_DIR}/final_text_file_single/corpus.segmented.nltk.txt -## Shard finalized text so that it has a chance of fitting in memory when creating pretraining data into hdf5 (choose appropriate number of shards for distributed training) -echo "Shard text files - size is approximate to prevent splitting an article across shards" -mkdir -p ${MERGED_DIR}/final_text_files_sharded -python3 utils/shard_text_input_file.py ${MERGED_DIR}/final_text_file_single/corpus.segmented.nltk.txt ${MERGED_DIR}/final_text_files_sharded/corpus.segmented.part. +# Shard the text files (group wiki+books then shard) +python3 /workspace/bert/data/bertPrep.py --action sharding --dataset books_wiki_en_corpus -# Convert sharded text files into hdf5 that are ready for BERT pretraining -echo "Creating hdf5 for each text shard" -mkdir -p ${MERGED_DIR}/hdf5_shards -export TARGET_DIR=${MERGED_DIR} -. 
utils/preprocessing_xargs_wrapper.sh ${N_PROCS_PREPROCESS} +# Create HDF5 files Phase 1 +python3 /workspace/bert/data/bertPrep.py --action create_hdf5_files --dataset books_wiki_en_corpus --max_seq_length 128 --max_predictions_per_seq 20 + + +# Create HDF5 files Phase 2 +python3 /workspace/bert/data/bertPrep.py --action create_hdf5_files --dataset books_wiki_en_corpus --max_seq_length 512 --max_predictions_per_seq 80 diff --git a/PyTorch/LanguageModeling/BERT/data/merge_datasets_after_creation.sh b/PyTorch/LanguageModeling/BERT/data/merge_datasets_after_creation.sh deleted file mode 100755 index 2a4ab8da..00000000 --- a/PyTorch/LanguageModeling/BERT/data/merge_datasets_after_creation.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - - -MERGED_DIR=$1 # e.g wikipedia+bookcorpus -INPUTFILES=$2 # directories with hdf5 files separated by comma -NUM_SHARDS=$3 - -source utils/config.sh - - -META_DIR=$MERGED_DIR/meta -mkdir -p ${MERGED_DIR} -mkdir -p ${META_DIR} - -echo "create mixed dataset ids" -echo "python utils/create_mixed_dataset_ids.py --input_files=${INPUTFILES} --num_output_shards=${NUM_SHARDS} --output_dir=${META_DIR} --random_seed=${SEED}" -python utils/create_mixed_dataset_ids.py --input_files=${INPUTFILES} --num_output_shards=${NUM_SHARDS} --output_dir=${META_DIR} --random_seed=${SEED} - - -echo "Creating hdf5 for each text shard" -mkdir -p ${MERGED_DIR}/hdf5_shards -echo "create mixed datasets with hdf5 files" -echo "python utils/create_mixed_dataset.py --input_files=${INPUTFILES} --output_dir=${MERGED_DIR}/hdf5_shards --lookup=${META_DIR}/lookup_table.pkl --indices_dir=${META_DIR} --index_range=0-${NUM_SHARDS} --random_seed=${SEED}" -python utils/create_mixed_dataset.py --input_files=${INPUTFILES} --output_dir=${MERGED_DIR}/hdf5_shards --lookup=${META_DIR}/lookup_table.pkl --indices_dir=${META_DIR} --index_range=0-$((NUM_SHARDS-1)) --random_seed=${SEED} - - -rm -rf ${META_DIR} - - diff --git a/PyTorch/LanguageModeling/BERT/data/utils/config.sh b/PyTorch/LanguageModeling/BERT/data/utils/config.sh deleted file mode 100755 index 3192fb04..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/config.sh +++ /dev/null @@ -1,24 +0,0 @@ -#! 
/bin/bash - -set -e - -USE_BERT_LARGE=true -MAX_SEQUENCE_LENGTH=512 -MAX_PREDICTIONS_PER_SEQUENCE=80 -MASKED_LM_PROB=0.15 -SEED=12345 -DUPE_FACTOR=5 -DO_LOWER_CASE="True" -N_LINES_PER_SHARD_APPROX=396000 # Default=396000 creates 256 shards - -N_PROCS_PREPROCESS=4 # Adjust this based on memory requirements and available number of cores - -BERT_BASE_DIR="/workspace/bert/vocab/uncased_L-12_H-768_A-12" -BERT_LARGE_DIR="/workspace/bert/vocab/uncased_L-24_H-1024_A-16" - -if [ "$USE_BERT_LARGE" = true ] ; then - VOCAB_FILE="${BERT_LARGE_DIR}/vocab.txt" -else - VOCAB_FILE="${BERT_BASE_DIR}/vocab.txt" -fi - diff --git a/PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset.py b/PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset.py deleted file mode 100644 index 7e63dc2e..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset.py +++ /dev/null @@ -1,160 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import logging -import os -import random -from io import open -import h5py -import numpy as np -from tqdm import tqdm, trange -import random -import collections -import math -import multiprocessing as mp -""" -mixing hdf5 shards with each other -""" - - -def shard_files(output_files, l_instance_ids, lookuptable, files): - - l_input_ids = [] - l_input_masks = [] - l_segment_ids = [] - l_masked_lm_positions = [] - l_masked_lm_ids = [] - l_next_sentence_labels = [] - - seq_len = 0 - pred_len = 0 - with h5py.File(files[0], 'r') as f: - seq_len = f['input_ids'].shape[1] - pred_len = f['masked_lm_positions'].shape[1] - - assert(seq_len > 0 and pred_len > 0) - for i, output_file in enumerate(output_files): - output_length = len(l_instance_ids[i]) - print("preparing to write {} instances to {}".format(output_length, output_file)) - input_ids = np.ones([output_length, seq_len], dtype=np.int32) - input_masks = np.ones([output_length, seq_len], dtype=np.int8) - segment_ids = np.ones([output_length, seq_len], dtype=np.int8) - masked_lm_positions = np.ones([output_length, pred_len], dtype=np.int32) - masked_lm_ids= np.ones([output_length, pred_len], dtype=np.int32) - next_sentence_labels = np.ones(output_length, dtype=np.int8) - l_input_ids.append(input_ids) - l_input_masks.append(input_masks) - l_segment_ids.append(segment_ids) - l_masked_lm_positions.append(masked_lm_positions) - l_masked_lm_ids.append(masked_lm_ids) - l_next_sentence_labels.append(next_sentence_labels) - for did, f in enumerate(tqdm(files)): - h5_f = h5py.File(f, 'r') - f_input_ids = h5_f['input_ids'][:] - f_input_masks = h5_f['input_mask'][:] - f_segment_ids = h5_f['segment_ids'][:] - f_masked_lm_positions = h5_f['masked_lm_positions'][:] - f_masked_lm_ids = h5_f['masked_lm_ids'][:] - f_next_sentence_labels = h5_f['next_sentence_labels'][:] - h5_f.close() - for out_i, out_file in enumerate(output_files): - instance_ids = l_instance_ids[out_i] - for l, idx in enumerate(instance_ids): - doc_id, line_id = lookuptable[idx] - if doc_id == did: - l_input_ids[out_i][l] = f_input_ids[line_id] - l_input_masks[out_i][l] = f_input_masks[line_id] - l_segment_ids[out_i][l] = f_segment_ids[line_id] - l_masked_lm_positions[out_i][l] = f_masked_lm_positions[line_id] - l_masked_lm_ids[out_i][l] = f_masked_lm_ids[line_id] - l_next_sentence_labels[out_i][l] = f_next_sentence_labels[line_id] - for out_i, out_file in enumerate(output_files): - output_length = len(l_input_ids[out_i]) - print("writing {} instances to {}".format(output_length, out_file)) - with 
h5py.File(out_file, 'w') as f: - f.create_dataset("input_ids", data=l_input_ids[out_i], dtype='i4', compression='gzip') - f.create_dataset("input_mask", data=l_input_masks[out_i], dtype='i1', compression='gzip') - f.create_dataset("segment_ids", data=l_segment_ids[out_i], dtype='i1', compression='gzip') - f.create_dataset("masked_lm_positions", data=l_masked_lm_positions[out_i], dtype='i4', compression='gzip') - f.create_dataset("masked_lm_ids", data=l_masked_lm_ids[out_i], dtype='i4', compression='gzip') - f.create_dataset("next_sentence_labels", data=l_next_sentence_labels[out_i], dtype='i1', compression='gzip') - - -def main(): - - parser = argparse.ArgumentParser() - ## Required parameters - parser.add_argument("--input_files", - default=None, - type=str, - required=True, - help="comma seperated list of file paths, each path can be either file or directory of files") - parser.add_argument("--output_dir", - default=None, - type=str, - required=True, - help="directory for output shards") - parser.add_argument("--lookup", - default=None, - type=str, - required=True, - help="path to lookup table") - parser.add_argument("--indices_dir", - default=None, - type=str, - required=True, - help="path to shuffled instance indices") - parser.add_argument("--index_range", - default=None, - type=str, - required=True, - help="index range of output files to be written out, e.g specify '0-100' for writing out 0.hdf5 , ..., 100.hdf5") - parser.add_argument('--random_seed', - type=int, - default=12345, - help="random seed for initialization") - - args = parser.parse_args() - - rng = random.Random(args.random_seed) - np.random.seed(args.random_seed) - - - input_paths = args.input_files.strip().split(',') - input_paths = [f for f in input_paths if f] - - input_files = [] - for path in input_paths: - if os.path.isfile(path): - assert (path.endswith('.hdf5')), "file must be hdf5 file" - input_files.append(path) - else: - assert os.path.isdir(path) - hdf5_files = [os.path.join(path, f) for f in os.listdir(path) if os.path.isfile(os.path.join(path, f)) and f.endswith('.hdf5')] - input_files.extend(hdf5_files) - - input_files.sort() - assert(os.path.isdir(args.output_dir)) - - - - print("loading indices file") - start_idx, end_idx= int(args.index_range.split('-')[0]), int(args.index_range.split('-')[1]) - index_files = [] - instance_ids = [] - for i in range(start_idx, end_idx + 1): - index_files.append(os.path.join(args.indices_dir, "indices_" + str(i) + ".npy")) - instance_ids.append( np.load(index_files[-1])) - - output_files = [os.path.join(args.output_dir, indices_file.split('.')[0].split('_')[-1] + ".hdf5") for indices_file in index_files] - print("output_files", output_files) - - print("loading lookup table") - lookup_table = np.load(args.lookup) - shard_files(output_files, instance_ids, lookup_table, input_files) - - - -if __name__ == "__main__": - main() - diff --git a/PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset_ids.py b/PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset_ids.py deleted file mode 100644 index 6d4e1b89..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/create_mixed_dataset_ids.py +++ /dev/null @@ -1,134 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import logging -import os -import random -from io import open -import h5py -import numpy as np -from tqdm import tqdm, trange -import random -import collections -import math -from tqdm import tqdm -import multiprocessing as mp -import pickle 
-import json -""" -mixing hdf5 shards with each other -""" -def load_and_prepare(input_files, num_shards): - - seq_len = None - pred_len = None - - input_lengths = [] - for input_file in input_files: - with h5py.File(input_file, 'r') as f: - input_lengths.append(len(f['input_ids'])) - if seq_len is None: - seq_len = f['input_ids'].shape[1] - pred_len = f['masked_lm_ids'].shape[1] - - assert (isinstance(seq_len, int) and isinstance(pred_len, int)) - - total_instances = sum(input_lengths) - n_inst_per_file = math.ceil(total_instances * 1.0 / num_shards) - permutation = np.random.permutation(total_instances) - - - instance_indices = [] - for i in range(0, num_shards): - start_pos = i * n_inst_per_file - end_pos = min((i+1) * n_inst_per_file, total_instances) - instance_indices.append(permutation[start_pos:end_pos]) - - return seq_len, pred_len, input_lengths, instance_indices - - - - -def main(): - - parser = argparse.ArgumentParser() - ## Required parameters - parser.add_argument("--input_files", - default=None, - type=str, - required=True, - help="comma seperated list of file paths, each path can be either file or directory of hdf5 files") - parser.add_argument("--num_output_shards", - default=None, - type=int, - required=True, - help="number of shards to be created. shards will be created as even as possible.") - parser.add_argument("--output_dir", - default=None, - type=str, - required=True, - help="directory for meta files") - parser.add_argument('--random_seed', - type=int, - default=12345, - help="random seed for initialization") - - args = parser.parse_args() - - rng = random.Random(args.random_seed) - np.random.seed(args.random_seed) - - - input_paths = args.input_files.strip().split(',') - input_paths = [f for f in input_paths if f] - - input_files = [] - for path in input_paths: - if os.path.isfile(path): - assert (path.endswith('.hdf5')), "file must be hdf5 file" - input_files.append(path) - else: - assert os.path.isdir(path) - hdf5_files = [os.path.join(path, f) for f in os.listdir(path) if os.path.isfile(os.path.join(path, f)) and f.endswith('.hdf5')] - input_files.extend(hdf5_files) - input_files.sort() - - assert(os.path.isdir(args.output_dir)) - - print("load and prepare") - seq_len, pred_len, input_lengths, output_inst_indices = load_and_prepare(input_files, args.num_output_shards) - print("preparing lookup table") - total_num_instances = sum(input_lengths) - out_2_in = dict() - length_so_far = 0 - for i, l in enumerate(input_lengths): - for j in range(l): - out_2_in[length_so_far + j] = (i, j) - length_so_far += input_lengths[i] - - - - output_files = [os.path.join(args.output_dir, "indices_" + str(i) + ".npy") for i in range(args.num_output_shards)] - print("save data") - - - with open(os.path.join(args.output_dir, 'lookup_table.pkl'), 'wb') as f: - pickle.dump(out_2_in, f) - - for i, out_file in enumerate(output_files): - np.save(out_file, output_inst_indices[i]) - - - meta = {'seq_len': seq_len, 'pred_len':pred_len} - - with open(os.path.join(args.output_dir, 'meta_data.pkl'), 'wb') as f: - pickle.dump(meta, f) - - - - - - - -if __name__ == "__main__": - main() diff --git a/PyTorch/LanguageModeling/BERT/data/utils/preprocessing.sh b/PyTorch/LanguageModeling/BERT/data/utils/preprocessing.sh deleted file mode 100755 index 64e63a9b..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/preprocessing.sh +++ /dev/null @@ -1,23 +0,0 @@ -#! 
/bin/bash - -SHARD_INDEX=${1} -INPUT_FILE="${TARGET_DIR}/final_text_files_sharded/corpus.segmented.part.${SHARD_INDEX}.txt" - -source /workspace/bert/data/utils/config.sh - -OUTPUT_DIR=${TARGET_DIR}/hdf5_shards -mkdir -p ${OUTPUT_DIR} - -OUTPUT_FILE="${OUTPUT_DIR}/${SHARD_INDEX}.hdf5" - -python /workspace/bert/create_pretraining_data.py \ - --input_file=${INPUT_FILE} \ - --output_file=${OUTPUT_FILE} \ - --vocab_file=${VOCAB_FILE} \ - --do_lower_case \ - --max_seq_length=${MAX_SEQUENCE_LENGTH} \ - --max_predictions_per_seq=${MAX_PREDICTIONS_PER_SEQUENCE} \ - --masked_lm_prob=${MASKED_LM_PROB} \ - --random_seed=${SEED} \ - --dupe_factor=${DUPE_FACTOR} - diff --git a/PyTorch/LanguageModeling/BERT/data/utils/preprocessing_xargs_wrapper.sh b/PyTorch/LanguageModeling/BERT/data/utils/preprocessing_xargs_wrapper.sh deleted file mode 100755 index 0e767cd0..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/preprocessing_xargs_wrapper.sh +++ /dev/null @@ -1,15 +0,0 @@ -#! /bin/bash - -source /workspace/bert/data/utils/config.sh - -SHARD_COUNT=0 -rm -rf ${TARGET_DIR}/xarg_list.txt -touch ${TARGET_DIR}/xarg_list.txt -for file in ${TARGET_DIR}/final_text_files_sharded/*; do - echo ${SHARD_COUNT} >> ${TARGET_DIR}/xarg_list.txt - SHARD_COUNT=$((SHARD_COUNT+1)) -done - -xargs -n 1 --max-procs=${N_PROCS_PREPROCESS} --arg-file=${TARGET_DIR}/xarg_list.txt /workspace/bert/data/utils/preprocessing.sh - -rm ${TARGET_DIR}/xarg_list.txt diff --git a/PyTorch/LanguageModeling/BERT/data/utils/sentence_segmentation_nltk.py b/PyTorch/LanguageModeling/BERT/data/utils/sentence_segmentation_nltk.py deleted file mode 100644 index fa237987..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/sentence_segmentation_nltk.py +++ /dev/null @@ -1,28 +0,0 @@ -# NVIDIA - -import argparse -import nltk -import os - -nltk.download('punkt') - -parser = argparse.ArgumentParser(description='Sentence Segmentation') - -parser.add_argument('input_file', type=str) -parser.add_argument('output_file', type=str) - -args = parser.parse_args() - -input_file = args.input_file -output_file = args.output_file - -doc_seperator = "\n" - -with open(input_file) as ifile: - with open(output_file, "w") as ofile: - for line in ifile: - if line != "\n": - sent_list = nltk.tokenize.sent_tokenize(line) - for sent in sent_list: - ofile.write(sent + "\n") - ofile.write(doc_seperator) diff --git a/PyTorch/LanguageModeling/BERT/data/utils/shard_text_input_file.py b/PyTorch/LanguageModeling/BERT/data/utils/shard_text_input_file.py deleted file mode 100644 index f436314e..00000000 --- a/PyTorch/LanguageModeling/BERT/data/utils/shard_text_input_file.py +++ /dev/null @@ -1,47 +0,0 @@ -# NVIDIA - -import os -import argparse - -parser = argparse.ArgumentParser(description='Dataset sharding') - -parser.add_argument('input_file', type=str) -parser.add_argument('output_file', type=str) - -args = parser.parse_args() - -input_file = args.input_file -output_file = args.output_file - -doc_seperator = "\n" - -line_buffer = [] -shard_size = 396000 # Approximate, will split at next article break -line_counter = 0 -shard_index = 0 - -ifile_lines = 0 -with open(input_file) as ifile: - for line in ifile: - ifile_lines += 1 - -print("Input file contains", ifile_lines, "lines.") - -iline_counter = 1 -with open(input_file) as ifile: - for line in ifile: - if line_counter < shard_size and iline_counter < ifile_lines: - line_buffer.append(line) - line_counter += 1 - iline_counter += 1 - elif line_counter >= shard_size and line != "\n" and iline_counter < ifile_lines: - 
line_buffer.append(line) - line_counter += 1 - iline_counter += 1 - else: - with open(output_file + str(shard_index) + ".txt", "w") as ofile: - for oline in line_buffer: - ofile.write(oline) - line_buffer = [] - line_counter = 0 - shard_index += 1 diff --git a/PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/download_wikipedia.sh b/PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/download_wikipedia.sh deleted file mode 100755 index b137d228..00000000 --- a/PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/download_wikipedia.sh +++ /dev/null @@ -1,30 +0,0 @@ -#! /bin/bash - -WIKI_DUMP="https://dumps.wikimedia.org/enwiki/20190320/enwiki-20190320-pages-articles-multistream.xml.bz2" -N_PROCS_PREPROCESS=$(nproc) # Adjust this based on memory requirements and available number of cores - -# Download Wikipedia dump file -mkdir -p ./download - -# Not using --noclobber since it emits an error if exists (incompatible with bash 'set -e') -echo "Downloading Wikidump" -if [ ! -f ./download/wikidump.xml.bz2 ]; then - wget -O ./download/wikidump.xml.bz2 ${WIKI_DUMP} -fi - -# Extract dump -echo "Extracting Wikidump" -mkdir -p ./raw_data -if [ ! -f ./raw_data/wikidump.xml ]; then - pv ./download/wikidump.xml.bz2 | bunzip2 -kdc > ./raw_data/wikidump.xml -fi - -# Wikiextractor.py - Creates lots of folders/files in "doc format" -echo "Running Wikiextractor" -mkdir -p ./extracted_articles -/workspace/wikiextractor/WikiExtractor.py ./raw_data/wikidump.xml -b 1000M --processes ${N_PROCS_PREPROCESS} -o ./extracted_articles - -# Remove XML Tags and extraneous titles (since they are not sentences) -# Also clean to remove lines between paragraphs within article and use space-separated articles -echo "Cleaning and formatting files (one article per line)" -python3 ./remove_tags_and_clean.py ./extracted_articles ./wikipedia_corpus.txt diff --git a/PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/remove_tags_and_clean.py b/PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/remove_tags_and_clean.py deleted file mode 100644 index fb096b5c..00000000 --- a/PyTorch/LanguageModeling/BERT/data/wikipedia_corpus/remove_tags_and_clean.py +++ /dev/null @@ -1,39 +0,0 @@ -# NVIDIA - -import glob -import os -import argparse - -parser = argparse.ArgumentParser(description='Cleaning and merge downloaded bookcorpus files') - -parser.add_argument('extracted_articles_path', type=str) -parser.add_argument('output_file', type=str) - -args = parser.parse_args() - -extracted_articles_path = args.extracted_articles_path -output_file = args.output_file - -with open(output_file, "w") as ofile: - for dirname in glob.glob('{}/*/'.format(extracted_articles_path), recursive=False): - for filename in glob.glob(dirname + 'wiki_*', recursive=True): - print(filename) - article_lines = [] - article_open = False - - with open(filename, "r") as file: - for line in file: - if "<doc id=" in line: - article_open = True - elif "</doc>" in line: - article_open = False - for oline in article_lines[1:]: - if oline != "\n": - ofile.write(oline.rstrip() + " ") - ofile.write("\n\n") - article_lines = [] - else: - if article_open: - article_lines.append(line) - - diff --git a/PyTorch/LanguageModeling/BERT/fused_adam_local.py b/PyTorch/LanguageModeling/BERT/fused_adam_local.py deleted file mode 100644 index 7afa76d1..00000000 --- a/PyTorch/LanguageModeling/BERT/fused_adam_local.py +++ /dev/null @@ -1,205 +0,0 @@ -import types -import importlib - -import math -import torch - -def warmup_cosine(x, warmup=0.002): - if x < warmup: - return x/warmup - return 0.5 * (1.0 + torch.cos(math.pi * x)) - -def
warmup_constant(x, warmup=0.002): - if x < warmup: - return x/warmup - return 1.0 - -def warmup_linear(x, warmup=0.002): - if x < warmup: - return x/warmup - return 1.0 - x - -SCHEDULES = { - 'warmup_cosine':warmup_cosine, - 'warmup_constant':warmup_constant, - 'warmup_linear':warmup_linear, -} - -class FusedAdamBert(torch.optim.Optimizer): - - """Implements Adam algorithm. Currently GPU-only. Requires Apex to be installed via - ``python setup.py install --cuda_ext --cpp_ext``. - It has been proposed in `Adam: A Method for Stochastic Optimization`_. - Arguments: - params (iterable): iterable of parameters to optimize or dicts defining - parameter groups. - lr (float, optional): learning rate. (default: 1e-3) - betas (Tuple[float, float], optional): coefficients used for computing - running averages of gradient and its square. (default: (0.9, 0.999)) - eps (float, optional): term added to the denominator to improve - numerical stability. (default: 1e-8) - weight_decay (float, optional): weight decay (L2 penalty) (default: 0) - amsgrad (boolean, optional): whether to use the AMSGrad variant of this - algorithm from the paper `On the Convergence of Adam and Beyond`_ - (default: False) NOT SUPPORTED in FusedAdam! - eps_inside_sqrt (boolean, optional): in the 'update parameters' step, - adds eps to the bias-corrected second moment estimate before - evaluating square root instead of adding it to the square root of - second moment estimate as in the original paper. (default: False) - .. _Adam\: A Method for Stochastic Optimization: - https://arxiv.org/abs/1412.6980 - .. _On the Convergence of Adam and Beyond: - https://openreview.net/forum?id=ryQu7f-RZ - """ - -# def __init__(self, params, -# lr=1e-3, bias_correction = True, -# betas=(0.9, 0.999), eps=1e-8, eps_inside_sqrt = False, -# weight_decay=0., max_grad_norm=0., amsgrad=False): - - def __init__(self, params, lr=1e-3, warmup=-1, t_total=-1, bias_correction=False, betas=(0.9, 0.999), schedule='warmup_linear', - eps=1e-6, eps_inside_sqrt = False, weight_decay=0., max_grad_norm=1.0, amsgrad=False): - - - global fused_adam_cuda - fused_adam_cuda = importlib.import_module("fused_adam_cuda") - - if amsgrad: - raise RuntimeError('FusedAdam does not support the AMSGrad variant.') - defaults = dict(lr=lr, bias_correction=bias_correction, - betas=betas, eps=eps, weight_decay=weight_decay, - max_grad_norm=max_grad_norm) - super(FusedAdamBert, self).__init__(params, defaults) - print("LOCAL FUSED ADAM") - self.eps_mode = 0 if eps_inside_sqrt else 1 - self.schedule = schedule - self.t_total = t_total - self.warmup = warmup - - def get_lr(self): - lr = [] - for group in self.param_groups: - for p in group['params']: - state = self.state[p] - if len(state) == 0: - return [0] - if group['t_total'] != -1: - schedule_fct = SCHEDULES[group['schedule']] - lr_scheduled = group['lr'] * schedule_fct(state['step']/group['t_total'], group['warmup']) - else: - lr_scheduled = group['lr'] - lr.append(lr_scheduled) - print("LR {}".format(lr_scheduled)) - return lr - - def step(self, closure=None, grads=None, output_params=None, scale=1., grad_norms=None): - """Performs a single optimization step. - Arguments: - closure (callable, optional): A closure that reevaluates the model - and returns the loss. - grads (list of tensors, optional): weight gradient to use for the - optimizer update. If gradients have type torch.half, parameters - are expected to be in type torch.float. 
(default: None) - output params (list of tensors, optional): A reduced precision copy - of the updated weights written out in addition to the regular - updated weights. Have to be of same type as gradients. (default: None) - scale (float, optional): factor to divide gradient tensor values - by before applying to weights. (default: 1) - """ - loss = None - if closure is not None: - loss = closure() - - if grads is None: - grads_group = [None]*len(self.param_groups) - # backward compatibility - # assuming a list/generator of parameter means single group - elif isinstance(grads, types.GeneratorType): - grads_group = [grads] - elif type(grads[0])!=list: - grads_group = [grads] - else: - grads_group = grads - - if output_params is None: - output_params_group = [None]*len(self.param_groups) - elif isinstance(output_params, types.GeneratorType): - output_params_group = [output_params] - elif type(output_params[0])!=list: - output_params_group = [output_params] - else: - output_params_group = output_params - - if grad_norms is None: - grad_norms = [None]*len(self.param_groups) - - #Compute global norm - global_norm = 0.0 - for group, grads_this_group, output_params_this_group, grad_norm in zip(self.param_groups, grads_group, - output_params_group, grad_norms): - global_norm = (global_norm ** 2 + grad_norm ** 2) ** 0.5 - - for group, grads_this_group, output_params_this_group, grad_norm in zip(self.param_groups, grads_group, output_params_group, grad_norms): - if grads_this_group is None: - grads_this_group = [None]*len(group['params']) - if output_params_this_group is None: - output_params_this_group = [None]*len(group['params']) - - # compute combined scale factor for this group - combined_scale = scale - if group['max_grad_norm'] > 0: - # norm is in fact norm*scale - clip = ((global_norm / scale) + 1e-6) / group['max_grad_norm'] - if clip > 1: - combined_scale = clip * scale - - bias_correction = 1 if group['bias_correction'] else 0 - - for p, grad, output_param in zip(group['params'], grads_this_group, output_params_this_group): - #note: p.grad should not ever be set for correct operation of mixed precision optimizer that sometimes sends None gradients - if p.grad is None and grad is None: - continue - if grad is None: - grad = p.grad.data - if grad.is_sparse: - raise RuntimeError('FusedAdam does not support sparse gradients, please consider SparseAdam instead') - - state = self.state[p] - - # State initialization - if len(state) == 0: - state['step'] = 0 - # Exponential moving average of gradient values - state['exp_avg'] = torch.zeros_like(p.data) - # Exponential moving average of squared gradient values - state['exp_avg_sq'] = torch.zeros_like(p.data) - - exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] - beta1, beta2 = group['betas'] - - state['step'] += 1 - - out_p = torch.tensor([], dtype = torch.float) if output_param is None else output_param - #Changes sharath - - schedule_fct = SCHEDULES[self.schedule] - #schedule_fct(state['step']/self.t_total, self.warmup) - #step_lr = group['lr'] * schedule_fct(state['step']/self.t_total, self.warmup) - #step_lr = group['lr'] * scale#schedule_fct(state['step']/self.t_total, self.warmup)# schedule_fct(state['step']/group['t_total'], group['warmup']) - #print(scale, step_lr) - #print(group['lr']) - fused_adam_cuda.adam(p.data, - out_p, - exp_avg, - exp_avg_sq, - grad, - group['lr'], #step_lr,#group['lr'], - beta1, - beta2, - group['eps'], - combined_scale, - state['step'], - self.eps_mode, - bias_correction, - group['weight_decay']) - 
return loss diff --git a/PyTorch/LanguageModeling/BERT/modeling.py b/PyTorch/LanguageModeling/BERT/modeling.py index ac9355b0..8d644821 100644 --- a/PyTorch/LanguageModeling/BERT/modeling.py +++ b/PyTorch/LanguageModeling/BERT/modeling.py @@ -35,6 +35,11 @@ from torch.utils import checkpoint from file_utils import cached_path +from torch.nn import Module +from torch.nn.parameter import Parameter +import torch.nn.functional as F +import torch.nn.init as init + logger = logging.getLogger(__name__) PRETRAINED_MODEL_ARCHIVE_MAP = { @@ -111,14 +116,27 @@ def load_tf_weights_in_bert(model, tf_checkpoint_path): return model +@torch.jit.script +def f_gelu(x): + return x * 0.5 * (1.0 + torch.erf(x / 1.41421)) + +@torch.jit.script +def bias_gelu(bias, y): + x = bias + y + return x * 0.5 * (1.0 + torch.erf(x / 1.41421)) + +@torch.jit.script +def bias_tanh(bias, y): + x = bias + y + return torch.tanh(x) + def gelu(x): """Implementation of the gelu activation function. For information: OpenAI GPT's gelu is slightly different (and gives slightly different results): 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) Also see https://arxiv.org/abs/1606.08415 """ - return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0))) - + return f_gelu(x) def swish(x): return x * torch.sigmoid(x) @@ -126,6 +144,53 @@ def swish(x): ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish} +class LinearActivation(Module): + r"""Fused Linear and activation Module. + """ + __constants__ = ['bias'] + + def __init__(self, in_features, out_features, act='gelu', bias=True): + super(LinearActivation, self).__init__() + self.in_features = in_features + self.out_features = out_features + self.fused_gelu = False + self.fused_tanh = False + if isinstance(act, str) or (sys.version_info[0] == 2 and isinstance(act, unicode)): + if bias and act == 'gelu': + self.fused_gelu = True + elif bias and act == 'tanh': + self.fused_tanh = True + else: + self.act_fn = ACT2FN[act] + else: + self.act_fn = act + self.weight = Parameter(torch.Tensor(out_features, in_features)) + if bias: + self.bias = Parameter(torch.Tensor(out_features)) + else: + self.register_parameter('bias', None) + self.reset_parameters() + + def reset_parameters(self): + init.kaiming_uniform_(self.weight, a=math.sqrt(5)) + if self.bias is not None: + fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight) + bound = 1 / math.sqrt(fan_in) + init.uniform_(self.bias, -bound, bound) + + def forward(self, input): + if self.fused_gelu: + return bias_gelu(self.bias, F.linear(input, self.weight, None)) + elif self.fused_tanh: + return bias_tanh(self.bias, F.linear(input, self.weight, None)) + else: + return self.act_fn(F.linear(input, self.weight, self.bias)) + + def extra_repr(self): + return 'in_features={}, out_features={}, bias={}'.format( + self.in_features, self.out_features, self.bias is not None + ) + class BertConfig(object): """Configuration class to store the configuration of a `BertModel`. 
@@ -216,7 +281,11 @@ class BertConfig(object): return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" try: - from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm + import apex + #apex.amp.register_half_function(apex.normalization.fused_layer_norm, 'FusedLayerNorm') + import apex.normalization + #apex.amp.register_float_function(apex.normalization.FusedLayerNorm, 'forward') + BertLayerNorm = apex.normalization.FusedLayerNorm except ImportError: print("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.") class BertLayerNorm(nn.Module): @@ -281,29 +350,35 @@ class BertSelfAttention(nn.Module): self.value = nn.Linear(config.hidden_size, self.all_head_size) self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + self.softmax = nn.Softmax(dim=-1) def transpose_for_scores(self, x): new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) x = x.view(*new_x_shape) return x.permute(0, 2, 1, 3) + def transpose_key_for_scores(self, x): + new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) + x = x.view(*new_x_shape) + return x.permute(0, 2, 3, 1) + def forward(self, hidden_states, attention_mask): mixed_query_layer = self.query(hidden_states) mixed_key_layer = self.key(hidden_states) mixed_value_layer = self.value(hidden_states) query_layer = self.transpose_for_scores(mixed_query_layer) - key_layer = self.transpose_for_scores(mixed_key_layer) + key_layer = self.transpose_key_for_scores(mixed_key_layer) value_layer = self.transpose_for_scores(mixed_value_layer) # Take the dot product between "query" and "key" to get the raw attention scores. - attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) + attention_scores = torch.matmul(query_layer, key_layer) attention_scores = attention_scores / math.sqrt(self.attention_head_size) # Apply the attention mask is (precomputed for all layers in BertModel forward() function) attention_scores = attention_scores + attention_mask # Normalize the attention scores to probabilities. - attention_probs = nn.Softmax(dim=-1)(attention_scores) + attention_probs = self.softmax(attention_scores) # This is actually dropping out entire tokens to attend to, which might # seem a bit unusual, but is taken from the original Transformer paper. 
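The transpose_key_for_scores change above keeps the key tensor in (batch, heads, head_size, seq_len) layout, so the scores matmul no longer needs a transpose in the forward pass. A minimal equivalence check (the shapes are illustrative, not the ones used in training):

    import torch

    B, H, S, D = 2, 4, 8, 16                    # batch, heads, seq_len, head_size (illustrative)
    q = torch.randn(B, H, S, D)                 # transpose_for_scores layout
    k = torch.randn(B, H, S, D)
    k_pre = k.permute(0, 1, 3, 2).contiguous()  # transpose_key_for_scores layout: (B, H, D, S)

    old_scores = torch.matmul(q, k.transpose(-1, -2))
    new_scores = torch.matmul(q, k_pre)         # no transpose in the hot path
    assert torch.allclose(old_scores, new_scores)

Both paths compute Q.K^T; the new layout simply folds the transpose into the initial view/permute of the key projection output.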
@@ -345,15 +420,10 @@ class BertAttention(nn.Module): class BertIntermediate(nn.Module): def __init__(self, config): super(BertIntermediate, self).__init__() - self.dense = nn.Linear(config.hidden_size, config.intermediate_size) - if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)): - self.intermediate_act_fn = ACT2FN[config.hidden_act] - else: - self.intermediate_act_fn = config.hidden_act + self.dense_act = LinearActivation(config.hidden_size, config.intermediate_size, act=config.hidden_act) def forward(self, hidden_states): - hidden_states = self.dense(hidden_states) - hidden_states = self.intermediate_act_fn(hidden_states) + hidden_states = self.dense_act(hidden_states) return hidden_states @@ -449,31 +519,24 @@ class BertEncoder(nn.Module): class BertPooler(nn.Module): def __init__(self, config): super(BertPooler, self).__init__() - self.dense = nn.Linear(config.hidden_size, config.hidden_size) - self.activation = nn.Tanh() + self.dense_act = LinearActivation(config.hidden_size, config.hidden_size, act="tanh") def forward(self, hidden_states): # We "pool" the model by simply taking the hidden state corresponding # to the first token. first_token_tensor = hidden_states[:, 0] - pooled_output = self.dense(first_token_tensor) - pooled_output = self.activation(pooled_output) + pooled_output = self.dense_act(first_token_tensor) return pooled_output class BertPredictionHeadTransform(nn.Module): def __init__(self, config): super(BertPredictionHeadTransform, self).__init__() - self.dense = nn.Linear(config.hidden_size, config.hidden_size) - if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)): - self.transform_act_fn = ACT2FN[config.hidden_act] - else: - self.transform_act_fn = config.hidden_act + self.dense_act = LinearActivation(config.hidden_size, config.hidden_size, act=config.hidden_act) self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12) def forward(self, hidden_states): - hidden_states = self.dense(hidden_states) - hidden_states = self.transform_act_fn(hidden_states) + hidden_states = self.dense_act(hidden_states) hidden_states = self.LayerNorm(hidden_states) return hidden_states @@ -493,7 +556,9 @@ class BertLMPredictionHead(nn.Module): def forward(self, hidden_states): hidden_states = self.transform(hidden_states) + torch.cuda.nvtx.range_push("decoder input.size() = {}, weight.size() = {}".format(hidden_states.size(), self.decoder.weight.size())) hidden_states = self.decoder(hidden_states) + self.bias + torch.cuda.nvtx.range_pop() return hidden_states @@ -1247,3 +1312,4 @@ class BertForQuestionAnswering(BertPreTrainedModel): return total_loss else: return start_logits, end_logits + diff --git a/PyTorch/LanguageModeling/BERT/optimization.py b/PyTorch/LanguageModeling/BERT/optimization.py index 1a9ade8f..a47fb490 100644 --- a/PyTorch/LanguageModeling/BERT/optimization.py +++ b/PyTorch/LanguageModeling/BERT/optimization.py @@ -21,6 +21,13 @@ from torch.optim.optimizer import required from torch.nn.utils import clip_grad_norm_ #from fused_adam_local import FusedAdam from apex.optimizers import FusedAdam +from apex.multi_tensor_apply import multi_tensor_applier +import amp_C +multi_tensor_l2norm = amp_C.multi_tensor_l2norm +lamb_compute_update = amp_C.multi_tensor_lamb_stage1_cuda +lamb_apply_update = amp_C.multi_tensor_lamb_stage2_cuda +scale = amp_C.multi_tensor_scale + def warmup_cosine(x, warmup=0.002): if x < warmup: @@ -35,17 +42,235 @@ def warmup_constant(x, 
warmup=0.002): def warmup_linear(x, warmup=0.002): if x < warmup: return x/warmup - # return (1.0 - x) + return max((x - 1. )/ (warmup - 1.), 0.) + +def warmup_poly(x, warmup=0.002, degree=0.5): + if x < warmup: + return x/warmup + return (1.0 - x)**degree - return max((x - 1. )/ (warmup - 1.), 0.) SCHEDULES = { 'warmup_cosine':warmup_cosine, 'warmup_constant':warmup_constant, 'warmup_linear':warmup_linear, + 'warmup_poly':warmup_poly, } +class BertLAMB(Optimizer): + """Implements BERT version of LAMB algorithm. + Params: + lr: learning rate + warmup: portion of t_total for the warmup, -1 means no warmup. Default: -1 + t_total: total number of training steps for the learning + rate schedule, -1 means constant learning rate. Default: -1 + schedule: schedule to use for the warmup (see above). Default: 'warmup_linear' + b1: LAMBs b1. Default: 0.9 + b2: LAMBs b2. Default: 0.999 + e: LAMBs epsilon. Default: 1e-6 + weight_decay: Weight decay. Default: 0.01 + max_grad_norm: Maximum global norm for the gradients. Default: 1.0 + """ + def __init__(self, params, lr=required, warmup=-1, t_total=-1, schedule='warmup_poly', + b1=0.9, b2=0.999, e=1e-6, weight_decay=0.01, + max_grad_norm=1.0): + if lr is not required and lr < 0.0: + raise ValueError("Invalid learning rate: {} - should be >= 0.0".format(lr)) + if schedule not in SCHEDULES: + raise ValueError("Invalid schedule parameter: {}".format(schedule)) + if not 0.0 <= warmup < 1.0 and not warmup == -1: + raise ValueError("Invalid warmup: {} - should be in [0.0, 1.0[ or -1".format(warmup)) + if not 0.0 <= b1 < 1.0: + raise ValueError("Invalid b1 parameter: {} - should be in [0.0, 1.0[".format(b1)) + if not 0.0 <= b2 < 1.0: + raise ValueError("Invalid b2 parameter: {} - should be in [0.0, 1.0[".format(b2)) + if not e >= 0.0: + raise ValueError("Invalid epsilon value: {} - should be >= 0.0".format(e)) + defaults = dict(lr=lr, schedule=schedule, warmup=warmup, t_total=t_total, + b1=b1, b2=b2, e=e, weight_decay=weight_decay, + max_grad_norm=max_grad_norm) + super(BertLAMB, self).__init__(params, defaults) + self.step_count = 0 + self.b1 = b1 + self.b2 = b2 + self.epsilon = e + self.max_global_grad_norm = max_grad_norm + self.learning_rate = lr + self.schedule = schedule + self.warmup = warmup + self.max_steps = t_total + self.updates_created=False + + def get_lr(self): + lr = [] + for group in self.param_groups: + for p in group['params']: + state = self.state[p] + if len(state) == 0: + return [0] + if group['t_total'] != -1: + schedule_fct = SCHEDULES[group['schedule']] + lr_scheduled = group['lr'] * schedule_fct(state['step']/group['t_total'], group['warmup']) + else: + lr_scheduled = group['lr'] + lr.append(lr_scheduled) + return lr + + def apply_gradients(self, dummy_overflow_buf, lr_scheduled, per_param_decay, grad_list, param_list, momentum, velocity, update): + # Compute global gradient norm + global_grad_norm = multi_tensor_applier( + multi_tensor_l2norm, + dummy_overflow_buf, + [grad_list], + False)[0].item() + + # Compute per parameter norm + param_norms = multi_tensor_applier( + multi_tensor_l2norm, + dummy_overflow_buf, + [param_list], + True)[1] + + # Compute LAMB update + multi_tensor_applier( + lamb_compute_update, + dummy_overflow_buf, + [grad_list, param_list, momentum, velocity, update], + torch.cuda.FloatTensor(per_param_decay), + self.step_count, + self.b1, + self.b2, + self.epsilon, + global_grad_norm, + self.max_global_grad_norm, + ) + + # Computer per parameter update norm + update_norms = multi_tensor_applier( + 
multi_tensor_l2norm, + dummy_overflow_buf, + [update], + True)[1] + + # Apply LAMB update on parameters + multi_tensor_applier( + lamb_apply_update, + dummy_overflow_buf, + [param_list, update], + param_norms, + update_norms, + lr_scheduled, + ) + + def step(self, closure=None): + """Performs a single optimization step. + + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + loss = closure() + check = 1#torch.norm(all_grads, 2) + + grad_list = [] + param_list = [] + per_param_decay = [] + momentum = [] + velocity = [] + + fp16_grad_list = [] + fp16_from_fp32_param_list = [] + fp32_param_list = [] + fp16_per_param_decay = [] + fp16_momentum = [] + fp16_velocity = [] + + if not self.updates_created: + self.update = [] + self.fp16_update = [] + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data + if grad.is_sparse: + raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead') + + state = self.state[p] + + # State initialization + if len(state) == 0: + # Keep step here for compatibility with earlier resume from checkpoint + state['step'] = 0 + # Exponential moving average of gradient values + state['momentum'] = torch.zeros_like(p.data, dtype=torch.float32) + # Exponential moving average of squared gradient values + state['velocity'] = torch.zeros_like(p.data, dtype=torch.float32) + # fp32 master weights + if 'master_param' not in state.keys() and p.type() == 'torch.cuda.HalfTensor': + state['master_param'] = p.detach().clone().float() + + # ensure these 3 are float tensors + if state['momentum'].type() != 'torch.cuda.FloatTensor': + state['momentum'] = state['momentum'].float() + if state['velocity'].type() != 'torch.cuda.FloatTensor': + state['velocity'] = state['velocity'].float() + if 'master_param' in state.keys() and state['master_param'].type() != 'torch.cuda.FloatTensor': + state['master_param'] = state['master_param'].float() + + # Append all params, gradients, decays, velocity, momentum and updates to a list + if p.type() == 'torch.cuda.HalfTensor': + fp16_grad_list.append(grad) + fp32_param_list.append(state['master_param']) + fp16_from_fp32_param_list.append(p.data) + fp16_per_param_decay.append(group['weight_decay']) + fp16_momentum.append(state["momentum"]) + fp16_velocity.append(state["velocity"]) + if not self.updates_created: + #self.fp16_update.append(torch.empty_like(p.data, dtype=torch.float32)) + # Use fp16 weights as temporary buffer for update term. 
+ # This is safe because fp16 weights are overwritten after apply_gradients + self.fp16_update.append(p.data) + else: + grad_list.append(grad) + param_list.append(p.data) + per_param_decay.append(group['weight_decay']) + momentum.append(state["momentum"]) + velocity.append(state["velocity"]) + if not self.updates_created: + self.update.append(torch.empty_like(p.data)) + state['step'] += 1 + self.updates_created=True + update = self.update + fp16_update = self.fp16_update + + self.step_count = state['step'] + # Calculate learning rate from input schedule + # if self.max_steps != -1: + schedule_fct = SCHEDULES[self.schedule] + lr_scheduled = self.learning_rate * schedule_fct(self.step_count / self.max_steps, self.warmup) + if torch.distributed.get_rank() == 0: + print("Step {} LR {}".format(self.step_count, lr_scheduled)) + # else: + # lr_scheduled = self.learning_rate + + overflow_buf = torch.cuda.IntTensor([0]) + + if len(grad_list) > 0: + self.apply_gradients(overflow_buf, lr_scheduled, per_param_decay, grad_list, param_list, momentum, velocity, update) + if len(fp16_grad_list) > 0: + self.apply_gradients(overflow_buf, lr_scheduled, fp16_per_param_decay, fp16_grad_list, fp32_param_list, fp16_momentum, fp16_velocity, fp16_update) + multi_tensor_applier( + scale, + overflow_buf, + [fp32_param_list, fp16_from_fp32_param_list], + 1.) + + return loss + class BertAdam(Optimizer): """Implements BERT version of Adam algorithm with weight decay fix. Params: @@ -165,54 +390,3 @@ class BertAdam(Optimizer): return loss -# ======================================================================= -class BertAdam_FP16(FusedAdam): - """Implements BERT version of Adam algorithm with weight decay fix. - Params: - lr: learning rate - warmup: portion of t_total for the warmup, -1 means no warmup. Default: -1 - t_total: total number of training steps for the learning - rate schedule, -1 means constant learning rate. Default: -1 - schedule: schedule to use for the warmup (see above). Default: 'warmup_linear' - b1: Adams b1. Default: 0.9 - b2: Adams b2. Default: 0.999 - e: Adams epsilon. Default: 1e-6 - weight_decay: Weight decay. Default: 0.01 - max_grad_norm: Maximum norm for the gradients (-1 means no clipping). 
Default: 1.0 - """ - def __init__(self, params, lr, warmup=-1, t_total=-1, bias_correction=False, schedule='warmup_linear', - b1=0.9, b2=0.999, e=1e-6, weight_decay=0.01, - max_grad_norm=1.0): - if not lr >= 0.0: - raise ValueError("Invalid learning rate: {} - should be >= 0.0".format(lr)) - if schedule not in SCHEDULES: - raise ValueError("Invalid schedule parameter: {}".format(schedule)) - if not 0.0 <= warmup < 1.0 and not warmup == -1: - raise ValueError("Invalid warmup: {} - should be in [0.0, 1.0[ or -1".format(warmup)) - if not 0.0 <= b1 < 1.0: - raise ValueError("Invalid b1 parameter: {} - should be in [0.0, 1.0[".format(b1)) - if not 0.0 <= b2 < 1.0: - raise ValueError("Invalid b2 parameter: {} - should be in [0.0, 1.0[".format(b2)) - if not e >= 0.0: - raise ValueError("Invalid epsilon value: {} - should be >= 0.0".format(e)) - # defaults = dict(lr=lr, schedule=schedule, warmup=warmup, t_total=t_total, - # b1=b1, b2=b2, e=e, weight_decay=weight_decay, - # max_grad_norm=max_grad_norm) - super(BertAdam_FP16, self).__init__(params, lr=lr, bias_correction=bias_correction, betas=(b1, b2), eps=e, weight_decay=weight_decay, max_grad_norm=max_grad_norm)#defaults) - - def get_lr(self): - lr = [] - for group in self.param_groups: - for p in group['params']: - state = self.state[p] - if len(state) == 0: - print("returning", state) - return [0] - if group['t_total'] != -1: - schedule_fct = SCHEDULES[group['schedule']] - lr_scheduled = group['lr'] * schedule_fct(state['step']/group['t_total'], group['warmup']) - else: - lr_scheduled = group['lr'] - lr.append(lr_scheduled) - print("LR {}".format(lr_scheduled)) - return lr diff --git a/PyTorch/LanguageModeling/BERT/run.sub b/PyTorch/LanguageModeling/BERT/run.sub new file mode 100644 index 00000000..1f03d9ff --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/run.sub @@ -0,0 +1,203 @@ +#!/bin/bash +#SBATCH -p batch # partition +#SBATCH -N 1 # number of nodes +#SBATCH -t 1:30:00 # wall time +#SBATCH -J "bert_pyt_lamb" # job name +#SBATCH --exclusive # exclusive node access +#SBATCH --mem=0 # all mem avail +#SBATCH --ntasks-per-node=16 # one task per gpu - Exception for pytorch// srun launch with -n1 +#SBATCH --threads-per-core=2 # HT is on +#SBATCH --cpus-per-task=40 # Not used yet (to reach perf pytorch might need overcommit) +#SBATCH --overcommit # Needed for pytorch +#SBATCH --mail-user=sharatht@nvidia.com +#SBATCH --mail-type=END +##SBATCH --deadline=$(date -d '+72 hours' '+%FT%T') + +##SBATCH --reservation mlperf # reservation name +##SBATCH --output=./logs/pytorch_%j.out +##SBATCH --exclude=sc-sdgx-[394,397] # targeting nodes with mask until the constraints are implemented +##SBATCH -w sc-sdgx-[377-388],sc-sdgx-[394-408] # avail pod12 +##SBATCH -C pod14 # constraint (not implemented yet) +##SBATCH --ntasks-per-socket=4 # Not used (our slurm does not have sockets defined) + + +## Your data, your container and its volumes +set -x +DATESTAMP=${DATESTAMP:-`date +'%y-%m-%d-%H-%M-%S-%N'`} +BENCHMARK=${BENCHMARK:-"bert"} +FRAMEWORK=${FRAMEWORK:-"pytorch"} +BENCHMARK_NAME=${BENCHMARK_NAME:-"bert"} +JOBNAME=${JOBNAME:-"bert_lamb_phase1_96n_wiki+books_only_fast_lamb_O1_run_1337"} +#.$DATESTAMP +# Create results directory + +#DATADIR=${DATADIR:-"/raid/datasets/bert/hdf5/shard_1472_test_split_10/seq_128_pred_20_dupe_5/training"} +#DATADIR_PHASE2=${DATADIR_PHASE2:-"/raid/datasets/bert/hdf5/shard_1472_test_split_10/seq_512_pred_80_dupe_5/training"}
+DATADIR="/raid/datasets/bert/hdf5/shard_1472_test_split_10/seq_128_pred_20_dupe_5/training" +DATADIR_PHASE2="/raid/datasets/bert/hdf5/shard_1472_test_split_10/seq_512_pred_80_dupe_5/training" +#BOOKS_DIR=/raid/datasets/seq_512_pred_80_dupe_5_shard_256 +VOCAB_PATH=${VOCAB_PATH:-"/raid/datasets/bert_vocab/vocab.txt"} +DATASET=${DATASET:-"coco/coco-2014"} +CODEDIR=${CODEDIR:="bert_pyt/tree/sharatht/fast_lamb_ci_runs"} +CONT=${CONT:-"gitlab-master.nvidia.com/dl/JoC/bert_pyt:bert_pyt"} +LOGDIR=${LOGDIR:-"/raid/results/$BENCHMARK"} +NEXP=${NEXP:-1} +SEED=${SEED:-$(od -A n -t d -N 3 /dev/urandom)} +#CHECKPOINT_DIR=${CHECKPOINT_DIR:-"/gpfs/fs1/svcnvdlfw/7108495/results/output"} +CHECKPOINT_DIR="/gpfs/fs1/svcnvdlfw/7588296/results/output" +## Load system-specific parameters for benchmark +DGXSYSTEM=${DGXSYSTEM:-"DGX1"} +if [[ ! -f "config_${DGXSYSTEM}.sh" ]]; then + echo "Unknown system, assuming DGX1" + DGXSYSTEM="DGX1" +fi +source config_${DGXSYSTEM}.sh + +IBDEVICES=${IBDEVICES:-$DGXIBDEVICES} + +## Check whether we are running in a slurm env +INSLURM=1 +if [[ -z "$SLURM_JOB_ID" ]]; then + INSLURM=0 + export SLURM_JOB_ID="${DATESTAMP}" + export SLURM_NNODES=1 +else + env | grep SLURM +fi +if [[ -z "$SLURM_JOB_ID" || $SLURM_NNODES -eq 1 ]]; then + # don't need IB if not multi-node + export IBDEVICES="" +fi + +# Create results directory +LOGFILE_BASE="${LOGDIR}/${DATESTAMP}" +mkdir -p $(dirname "${LOGFILE_BASE}") + +## Docker params +CONTVOLS="-v $DATADIR:/workspace/data -v $LOGDIR:/results -v $CHECKPOINT_DIR:/checkpoints -v $DATADIR_PHASE2:/workspace/data_phase2" +NV_GPU="${NVIDIA_VISIBLE_DEVICES:-$(seq 0 $((${SLURM_NTASKS_PER_NODE:-${DGXNGPU}}-1)) | tr '\n' ',' | sed 's/,$//')}" +DOCKEREXEC="env NV_GPU=${NV_GPU} nvidia-docker run --init --rm --net=host --uts=host --ipc=host --ulimit stack=67108864 --ulimit memlock=-1 --name=cont_${SLURM_JOB_ID} --security-opt seccomp=unconfined $IBDEVICES" + +## Get version of the OS +export MLPERF_HOST_OS="$(cat /etc/issue | head -1 | cut -f1-3 -d" ") / $(cat /etc/dgx-release | grep -E "DGX_PRETTY_NAME|DGX_OTA_VERSION" |cut -f2 -d= |cut -f2 -d '"' |paste -sd' ')" + +## Prep run and launch +MASTER_IP=`getent hosts \`hostname\` | cut -d ' ' -f1` +PORT=$((4242 + RANDOM%1000)) +SSH='' +SRUN='' +if [[ $INSLURM -eq 1 ]]; then + hosts=( `scontrol show hostname |tr "\n" " "` ) + SSH='ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no $hostn' + SRUN='srun --mem=0 -N 1 -n 1 -w $hostn' +else + hosts=( `hostname` ) +fi + +# Pull latest image +if [[ "${PULL}" != "0" ]]; then + DOCKERPULL="docker pull $CONT" + pids=(); + for hostn in ${hosts[@]}; do + timeout -k 600s 600s \ + $(eval echo $SRUN) $DOCKERPULL & + pids+=($!); + done + wait "${pids[@]}" + success=$? ; if [ $success -ne 0 ]; then echo "ERR: Image pull failed."; exit $success ; fi +fi + +# Test the base container launch +pids=(); +for hostn in ${hosts[@]}; do + timeout -k 600s 600s \ + $(eval echo $SRUN) $DOCKEREXEC $CONT python -c 'import torch; print("Found",torch.cuda.device_count(),"CUDA GPUs")' & + pids+=($!); +done +wait "${pids[@]}" +success=$?
; if [ $success -ne 0 ]; then echo "ERR: Base container launch failed."; exit $success ; fi + +# Launch containers +pids=(); rets=() +for hostn in ${hosts[@]}; do + $(eval echo $SSH) $DOCKEREXEC $CONTVOLS $CONT sleep infinity & + pids+=($!); rets+=($?); +done +success=0; for s in ${rets[@]}; do ((success+=s)); done ; if [ $success -ne 0 ]; then echo "ERR: Container launch failed."; exit $success ; fi +sleep 30 # Making sure containers have time to launch + +# Disable compat check from further running +pids=(); rets=() +for hostn in ${hosts[@]}; do + $(eval echo $SSH) docker exec cont_${SLURM_JOB_ID} rm -f /etc/shinit & + pids+=($!); +done +wait "${pids[@]}" + +# Run benchmarks + +export SEED +export NEXP +for nrun in `seq 1 $NEXP`; do + ( + echo "Beginning trial $nrun of $NEXP" + + export VARS=( + "-e" "SLURM_NNODES=$SLURM_NNODES" + "-e" "MLPERF_HOST_OS" + ) + + + ## Clear RAM cache dentries and inodes + echo "Clearing caches" + pids=(); rets=() + for hostn in ${hosts[@]}; do + if [[ $INSLURM -eq 1 ]]; then + $(eval echo $SSH) bash -c 'sync && sudo /sbin/sysctl vm.drop_caches=3' & + else + docker run --init --rm --privileged --entrypoint bash $CONT -c "sync && echo 3 > /proc/sys/vm/drop_caches || exit 1" & + fi + pids+=($!); rets+=($?); + done + wait "${pids[@]}" + success=0; for s in ${rets[@]}; do ((success+=s)); done ; if [ $success -ne 0 ]; then echo "ERR: Cache clearing failed."; exit $success ; fi + + ## Launching benchmark + pids=(); + export MULTI_NODE='' + for h in `seq 0 $((SLURM_NNODES-1))`; do + hostn="${hosts[$h]}" + echo "Launching on node $hostn" + if [[ $SLURM_NNODES -gt 1 ]]; then + export MULTI_NODE=" --nnodes=$SLURM_NNODES --node_rank=$h --master_addr=$MASTER_IP --master_port=$PORT" + else + export MULTI_NODE=" --master_port=$PORT" + fi + export DOCKERENV=( + "-e" "DGXSYSTEM=$DGXSYSTEM" + "-e" "MULTI_NODE=$MULTI_NODE" + "-e" "SEED=$SEED" + "-e" "SLURM_JOB_ID=$SLURM_JOB_ID" + "-e" "SLURM_NTASKS_PER_NODE=$SLURM_NTASKS_PER_NODE" + "-e" "SLURM_NNODES=$SLURM_NNODES" + ) + # Execute command + set -x + $(eval echo $SRUN) docker exec "${DOCKERENV[@]}" -e MODE=TRAIN cont_${SLURM_JOB_ID} ./run_and_time.sh & + pids+=($!); + set +x + done + wait "${pids[@]}" + + ) |& tee ${LOGFILE_BASE}_$nrun.log + + ## SEED update + export SEED=$(od -A n -t d -N 3 /dev/urandom) + +done + +# Clean up (note: on SLURM we skip this, as the epilogue will take care of it) +if [[ $INSLURM -eq 0 ]]; then + docker rm -f cont_${SLURM_JOB_ID} +fi diff --git a/PyTorch/LanguageModeling/BERT/run_and_time.sh b/PyTorch/LanguageModeling/BERT/run_and_time.sh new file mode 100755 index 00000000..4c4a77c4 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/run_and_time.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +#echo "Multi-node $MULTI_NODE" +#echo "Dataset $DATASET" +## DL vars -- Change your parameters below +# To change the number of GPUs per node, change the sbatch param --ntasks-per-node in the launching script + +## Need to avoid virtualenv and do python directly +# train.py --data=/dev/shm/$DATASET \ +# train.py --data=/raid/datasets/$DATASET \ + +DGXSYSTEM=${DGXSYSTEM:-"DGX1"} +if [[ -f config_${DGXSYSTEM}.sh ]]; then + source config_${DGXSYSTEM}.sh +else + source config_DGX1.sh + echo "Unknown system, assuming DGX1" +fi +SLURM_NTASKS_PER_NODE=${SLURM_NTASKS_PER_NODE:-$DGXNGPU} +SLURM_JOB_ID=${SLURM_JOB_ID:-$RANDOM} +MULTI_NODE=${MULTI_NODE:-''} +echo "Run vars: id $SLURM_JOB_ID gpus $SLURM_NTASKS_PER_NODE mparams $MULTI_NODE" + +# run training +BIND_LAUNCH=1 ## should be the default + +if [[ $BIND_LAUNCH -eq 1 ]]; 
then + LAUNCH_OPT="bind_pyt --nsockets_per_node 2 --ncores_per_socket ${DGXSOCKETCORES} --nproc_per_node ${SLURM_NTASKS_PER_NODE} ${MULTI_NODE}" +else + LAUNCH_OPT="torch.distributed.launch --nproc_per_node ${SLURM_NTASKS_PER_NODE} ${MULTI_NODE}" +fi + +# Options +python -m $LAUNCH_OPT \ +run_pretraining.py --seed=${SEED} \ +--train_batch_size=${BATCHSIZE} \ +--learning_rate=${LEARNING_RATE} \ +--warmup_proportion=${WARMUP_UPDATES} \ +$EXTRA_PARAMS diff --git a/PyTorch/LanguageModeling/BERT/run_pretraining.py b/PyTorch/LanguageModeling/BERT/run_pretraining.py index c5e8fae2..e21476c7 100644 --- a/PyTorch/LanguageModeling/BERT/run_pretraining.py +++ b/PyTorch/LanguageModeling/BERT/run_pretraining.py @@ -1,6 +1,7 @@ # coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team. -# +# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. + # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -18,10 +19,10 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function - -#================== +# ================== import csv import os +import time import logging import argparse import random @@ -34,65 +35,73 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, Datas from torch.utils.data.distributed import DistributedSampler import math from apex import amp - - +import multiprocessing from tokenization import BertTokenizer from modeling import BertForPreTraining, BertConfig -from optimization import BertAdam, BertAdam_FP16 +from optimization import BertLAMB -# from fused_adam_local import FusedAdamBert from file_utils import PYTORCH_PRETRAINED_BERT_CACHE - -from apex.optimizers import FusedAdam #, FP16_Optimizer -#from apex.optimizers import FusedAdam +from utils import is_main_process from apex.parallel import DistributedDataParallel as DDP from schedulers import LinearWarmUpScheduler +from apex.parallel.distributed import flat_dist_call +import amp_C +import apex_C +from apex.amp import _amp_state -logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', - datefmt = '%m/%d/%Y %H:%M:%S', - level = logging.INFO) +from concurrent.futures import ProcessPoolExecutor + +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s', + datefmt='%m/%d/%Y %H:%M:%S', + level=logging.INFO) logger = logging.getLogger(__name__) + +def create_pretraining_dataset(input_file, max_pred_length, shared_list, args): + + train_data = pretraining_dataset(input_file=input_file, max_pred_length=max_pred_length) + train_sampler = RandomSampler(train_data) + train_dataloader = DataLoader(train_data, sampler=train_sampler, + batch_size=args.train_batch_size * args.n_gpu, num_workers=4, + pin_memory=True) + # shared_list["0"] = (train_dataloader, input_file) + return train_dataloader, input_file + class pretraining_dataset(Dataset): def __init__(self, input_file, max_pred_length): self.input_file = input_file self.max_pred_length = max_pred_length f = h5py.File(input_file, "r") - self.input_ids = np.asarray(f["input_ids"][:]).astype(np.int64)#[num_instances x max_seq_length]) - self.input_masks = np.asarray(f["input_mask"][:]).astype(np.int64) #[num_instances x max_seq_length] - self.segment_ids = np.asarray(f["segment_ids"][:]).astype(np.int64) #[num_instances x max_seq_length] - self.masked_lm_positions = 
np.asarray(f["masked_lm_positions"][:]).astype(np.int64) #[num_instances x max_pred_length] - self.masked_lm_ids= np.asarray(f["masked_lm_ids"][:]).astype(np.int64) #[num_instances x max_pred_length] - self.next_sentence_labels = np.asarray(f["next_sentence_labels"][:]).astype(np.int64) # [num_instances] + keys = ['input_ids', 'input_mask', 'segment_ids', 'masked_lm_positions', 'masked_lm_ids', + 'next_sentence_labels'] + self.inputs = [np.asarray(f[key][:]) for key in keys] f.close() def __len__(self): 'Denotes the total number of samples' - return len(self.input_ids) + return len(self.inputs[0]) def __getitem__(self, index): - - input_ids= torch.from_numpy(self.input_ids[index]) # [max_seq_length] - input_mask = torch.from_numpy(self.input_masks[index]) #[max_seq_length] - segment_ids = torch.from_numpy(self.segment_ids[index])# [max_seq_length] - masked_lm_positions = torch.from_numpy(self.masked_lm_positions[index]) #[max_pred_length] - masked_lm_ids = torch.from_numpy(self.masked_lm_ids[index]) #[max_pred_length] - next_sentence_labels = torch.from_numpy(np.asarray(self.next_sentence_labels[index])) #[1] - + + [input_ids, input_mask, segment_ids, masked_lm_positions, masked_lm_ids, next_sentence_labels] = [ + torch.from_numpy(input[index].astype(np.int64)) if indice < 5 else torch.from_numpy( + np.asarray(input[index].astype(np.int64))) for indice, input in enumerate(self.inputs)] + masked_lm_labels = torch.ones(input_ids.shape, dtype=torch.long) * -1 index = self.max_pred_length # store number of masked tokens in index - if len((masked_lm_positions == 0).nonzero()) != 0: - index = (masked_lm_positions == 0).nonzero()[0].item() + padded_mask_indices = (masked_lm_positions == 0).nonzero() + if len(padded_mask_indices) != 0: + index = padded_mask_indices[0].item() masked_lm_labels[masked_lm_positions[:index]] = masked_lm_ids[:index] - return [input_ids, segment_ids, input_mask, masked_lm_labels, next_sentence_labels] + return [input_ids, segment_ids, input_mask, + masked_lm_labels, next_sentence_labels] -def main(): +def parse_arguments(): - print("IN NEW MAIN XD\n") parser = argparse.ArgumentParser() ## Required parameters @@ -186,233 +195,407 @@ def main(): help="Step to resume training from.") parser.add_argument('--num_steps_per_checkpoint', type=int, - default=2000, + default=100, help="Number of update steps until a model checkpoint is saved to disk.") - - + parser.add_argument('--phase2', + default=False, + action='store_true', + help="Whether to train with seq len 512") + parser.add_argument('--allreduce_post_accumulation', + default=False, + action='store_true', + help="Whether to do allreduces during gradient accumulation steps.") + parser.add_argument('--allreduce_post_accumulation_fp16', + default=False, + action='store_true', + help="Whether to do fp16 allreduce post accumulation.") + parser.add_argument('--accumulate_into_fp16', + default=False, + action='store_true', + help="Whether to use fp16 gradient accumulators.") + parser.add_argument('--phase1_end_step', + type=int, + default=7038, + help="Number of training steps in Phase1 - seq len 128") + parser.add_argument("--do_train", + default=False, + action='store_true', + help="Whether to run training.") args = parser.parse_args() + return args +def setup_training(args): - random.seed(args.seed) - np.random.seed(args.seed) - torch.manual_seed(args.seed) - - assert(torch.cuda.is_available()) + assert (torch.cuda.is_available()) if args.local_rank == -1: device = torch.device("cuda") - n_gpu = torch.cuda.device_count() + 
args.n_gpu = torch.cuda.device_count() else: torch.cuda.set_device(args.local_rank) device = torch.device("cuda", args.local_rank) - n_gpu = 1 + args.n_gpu = 1 # Initializes the distributed backend which will take care of sychronizing nodes/GPUs torch.distributed.init_process_group(backend='nccl', init_method='env://') - logger.info("device %s n_gpu %d distributed training %r", device, n_gpu, bool(args.local_rank != -1)) + logger.info("device %s n_gpu %d distributed training %r", device, args.n_gpu, bool(args.local_rank != -1)) if args.gradient_accumulation_steps < 1: raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format( - args.gradient_accumulation_steps)) + args.gradient_accumulation_steps)) if args.train_batch_size % args.gradient_accumulation_steps != 0: raise ValueError("Invalid gradient_accumulation_steps parameter: {}, batch size {} should be divisible".format( - args.gradient_accumulation_steps, args.train_batch_size)) + args.gradient_accumulation_steps, args.train_batch_size)) args.train_batch_size = args.train_batch_size // args.gradient_accumulation_steps + if not args.do_train: + raise ValueError(" `do_train` must be True.") - - if not args.resume_from_checkpoint and os.path.exists(args.output_dir) and (os.listdir(args.output_dir) and os.listdir(args.output_dir)!=['logfile.txt']): + if not args.resume_from_checkpoint and os.path.exists(args.output_dir) and ( + os.listdir(args.output_dir) and os.listdir(args.output_dir) != ['logfile.txt']): raise ValueError("Output directory ({}) already exists and is not empty.".format(args.output_dir)) if not args.resume_from_checkpoint: os.makedirs(args.output_dir, exist_ok=True) + return device, args + +def prepare_model_and_optimizer(args, device): + # Prepare model config = BertConfig.from_json_file(args.config_file) + + # Padding for divisibility by 8 + if config.vocab_size % 8 != 0: + config.vocab_size += 8 - (config.vocab_size % 8) model = BertForPreTraining(config) - + checkpoint = None if not args.resume_from_checkpoint: global_step = 0 else: if args.resume_step == -1: model_names = [f for f in os.listdir(args.output_dir) if f.endswith(".pt")] args.resume_step = max([int(x.split('.pt')[0].split('_')[1].strip()) for x in model_names]) - global_step = args.resume_step checkpoint = torch.load(os.path.join(args.output_dir, "ckpt_{}.pt".format(global_step)), map_location="cpu") model.load_state_dict(checkpoint['model'], strict=False) - - print("resume step from ", args.resume_step) + if args.phase2: + global_step -= args.phase1_end_step + if is_main_process(): + print("resume step from ", args.resume_step) model.to(device) - - # Prepare optimizer param_optimizer = list(model.named_parameters()) - no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] - optimizer_grouped_parameters = [ - {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, - {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} - ] - + no_decay = ['bias', 'gamma', 'beta', 'LayerNorm'] + optimizer_grouped_parameters = [] + names = [] + count = 1 + for n, p in param_optimizer: + count += 1 + if not any(nd in n for nd in no_decay): + optimizer_grouped_parameters.append({'params': [p], 'weight_decay': 0.01, 'name': n}) + names.append({'params': [n], 'weight_decay': 0.01}) + if any(nd in n for nd in no_decay): + optimizer_grouped_parameters.append({'params': [p], 'weight_decay': 0.00, 'name': n}) + names.append({'params': [n], 
'weight_decay': 0.00}) + + optimizer = BertLAMB(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=args.max_steps) if args.fp16: - optimizer = FusedAdam(optimizer_grouped_parameters, - lr=args.learning_rate, - #warmup=args.warmup_proportion, - #t_total=args.max_steps, - bias_correction=False, - weight_decay=0.01, - max_grad_norm=1.0) - if args.loss_scale == 0: # optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True) - model, optimizer = amp.initialize(model, optimizer, opt_level="O2", keep_batchnorm_fp32=False, loss_scale="dynamic") + model, optimizer = amp.initialize(model, optimizer, opt_level="O2", loss_scale="dynamic", + master_weights=False if args.accumulate_into_fp16 else True) else: # optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) - model, optimizer = amp.initialize(model, optimizer, opt_level="O2", keep_batchnorm_fp32=False, loss_scale=args.loss_scale) - - scheduler = LinearWarmUpScheduler(optimizer, warmup=args.warmup_proportion, total_steps=args.max_steps) - - else: - optimizer = BertAdam(optimizer_grouped_parameters, - lr=args.learning_rate, - warmup=args.warmup_proportion, - t_total=args.max_steps) - - + model, optimizer = amp.initialize(model, optimizer, opt_level="O2", loss_scale=args.loss_scale, + master_weights=False if args.accumulate_into_fp16 else True) + amp._amp_state.loss_scalers[0]._loss_scale = 2**20 if args.resume_from_checkpoint: + if args.phase2: + keys = list(checkpoint['optimizer']['state'].keys()) + #Override hyperparameters from Phase 1 + for key in keys: + checkpoint['optimizer']['state'][key]['step'] = global_step + for iter, item in enumerate(checkpoint['optimizer']['param_groups']): + checkpoint['optimizer']['param_groups'][iter]['t_total'] = args.max_steps + checkpoint['optimizer']['param_groups'][iter]['warmup'] = args.warmup_proportion + checkpoint['optimizer']['param_groups'][iter]['lr'] = args.learning_rate optimizer.load_state_dict(checkpoint['optimizer']) # , strict=False) - - + # Restore AMP master parameters + if args.fp16: + optimizer._lazy_init_maybe_master_weights() + optimizer._amp_stash.lazy_init_called = True + optimizer.load_state_dict(checkpoint['optimizer']) + for param, saved_param in zip(amp.master_params(optimizer), checkpoint['master params']): + param.data.copy_(saved_param.data) + if args.local_rank != -1: - model = DDP(model) - elif n_gpu > 1: - model = torch.nn.DataParallel(model) - - - files = [os.path.join(args.input_dir, f) for f in os.listdir(args.input_dir) if os.path.isfile(os.path.join(args.input_dir, f))] - files.sort() - - num_files = len(files) - - - logger.info("***** Running training *****") - # logger.info(" Num examples = %d", len(train_data)) - logger.info(" Batch size = %d", args.train_batch_size) - print(" LR = ", args.learning_rate) - - - model.train() - print("Training. . .") - - most_recent_ckpts_paths = [] - - print("Training. . 
.") - tr_loss = 0.0 # total added training loss - average_loss = 0.0 # averaged loss every args.log_freq steps - epoch = 0 - training_steps = 0 - while True: - if not args.resume_from_checkpoint: - random.shuffle(files) - f_start_id = 0 + if not args.allreduce_post_accumulation: + model = DDP(model, message_size=250000000, gradient_predivide_factor=torch.distributed.get_world_size()) else: - f_start_id = checkpoint['files'][0] - files = checkpoint['files'][1:] - args.resume_from_checkpoint = False - for f_id in range(f_start_id, len(files)): - data_file = files[f_id] - logger.info("file no %s file %s" %(f_id, data_file)) - train_data = pretraining_dataset(input_file=data_file, max_pred_length=args.max_predictions_per_seq) + flat_dist_call([param.data for param in model.parameters()], torch.distributed.broadcast, (0,) ) + elif args.n_gpu > 1: + model = torch.nn.DataParallel(model) - if args.local_rank == -1: - train_sampler = RandomSampler(train_data) - train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size * n_gpu, num_workers=4, pin_memory=True) + return model, optimizer, checkpoint, global_step + +def take_optimizer_step(args, optimizer, model, overflow_buf, global_step): + + if args.allreduce_post_accumulation: + # manually allreduce gradients after all accumulation steps + # check for Inf/NaN + # 1. allocate an uninitialized buffer for flattened gradient + scaler = _amp_state.loss_scalers[0] + master_grads = [p.grad for p in amp.master_params(optimizer) if p.grad is not None] + flat_grad_size = sum(p.numel() for p in master_grads) + allreduce_dtype = torch.float16 if args.allreduce_post_accumulation_fp16 else torch.float32 + flat_raw = torch.empty(flat_grad_size, device='cuda', dtype=allreduce_dtype) + # 2. combine unflattening and predivision of unscaled 'raw' gradient + allreduced_views = apex_C.unflatten(flat_raw, master_grads) + overflow_buf.zero_() + amp_C.multi_tensor_scale(65536, + overflow_buf, + [master_grads, allreduced_views], + scaler.loss_scale() / (torch.distributed.get_world_size() * args.gradient_accumulation_steps)) + # 3. sum gradient across ranks. Because of the predivision, this averages the gradient + torch.distributed.all_reduce(flat_raw) + # 4. combine unscaling and unflattening of allreduced gradient + overflow_buf.zero_() + amp_C.multi_tensor_scale(65536, + overflow_buf, + [allreduced_views, master_grads], + 1./scaler.loss_scale()) + # 5. update loss scale + scaler = _amp_state.loss_scalers[0] + old_overflow_buf = scaler._overflow_buf + scaler._overflow_buf = overflow_buf + had_overflow = scaler.update_scale() + scaler._overfloat_buf = old_overflow_buf + # 6. call optimizer step function + if had_overflow == 0: + optimizer.step() + global_step += 1 + else: + # Overflow detected, print message and clear gradients + if is_main_process(): + print(("Rank {} :: Gradient overflow. 
Skipping step, " + + "reducing loss scale to {}").format( + torch.distributed.get_rank(), + scaler.loss_scale())) + if _amp_state.opt_properties.master_weights: + for param in optimizer._amp_stash.all_fp32_from_fp16_params: + param.grad = None + for param in model.parameters(): + param.grad = None + else: + optimizer.step() + #optimizer.zero_grad() + for param in model.parameters(): + param.grad = None + global_step += 1 + + return global_step + +def main(): + + args = parse_arguments() + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + + device, args = setup_training(args) + + # Prepare optimizer + model, optimizer, checkpoint, global_step = prepare_model_and_optimizer(args, device) + + if is_main_process(): + print("SEED {}".format(args.seed)) + + if args.do_train: + if is_main_process(): + logger.info("***** Running training *****") + # logger.info(" Num examples = %d", len(train_data)) + logger.info(" Batch size = %d", args.train_batch_size) + print(" LR = ", args.learning_rate) + print("Training. . .") + + model.train() + most_recent_ckpts_paths = [] + average_loss = 0.0 # averaged loss every args.log_freq steps + epoch = 0 + training_steps = 0 + + pool = ProcessPoolExecutor(1) + + # Note: We loop infinitely over epochs, termination is handled via iteration count + while True: + thread = None + if not args.resume_from_checkpoint or epoch > 0 or args.phase2: + files = [os.path.join(args.input_dir, f) for f in os.listdir(args.input_dir) if + os.path.isfile(os.path.join(args.input_dir, f))] + files.sort() + num_files = len(files) + random.shuffle(files) + f_start_id = 0 else: - train_sampler = DistributedSampler(train_data) - train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size, num_workers=4, pin_memory=True) + f_start_id = checkpoint['files'][0] + files = checkpoint['files'][1:] + args.resume_from_checkpoint = False + num_files = len(files) - for step, batch in enumerate(tqdm(train_dataloader, desc="File Iteration")): - - training_steps += 1 - batch = [t.to(device) for t in batch] - input_ids, segment_ids, input_mask, masked_lm_labels, next_sentence_labels = batch#\ - loss = model(input_ids=input_ids, token_type_ids=segment_ids, attention_mask=input_mask, masked_lm_labels=masked_lm_labels, next_sentence_label=next_sentence_labels, checkpoint_activations=args.checkpoint_activations) - if n_gpu > 1: - loss = loss.mean() # mean() to average on multi-gpu. 
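# A minimal standalone sketch (hypothetical sizes, not upstream code) of the
# shard-selection arithmetic added a few hunk lines below: each rank gets its own
# HDF5 shard for every pass f_id over the input files, wrapping modulo num_files
# when there are more ranks than shards.
def shard_for_rank(f_id, rank, world_size, num_files):
    # mirrors: files[(f_id * world_size + rank + remainder * f_id) % num_files]
    if world_size > num_files:
        remainder = world_size % num_files
        return (f_id * world_size + rank + remainder * f_id) % num_files
    return (f_id * world_size + rank) % num_files

# 4 ranks over 8 shards: pass 0 covers shards 0..3, pass 1 covers 4..7, then it wraps.
assert [shard_for_rank(0, r, 4, 8) for r in range(4)] == [0, 1, 2, 3]
assert [shard_for_rank(1, r, 4, 8) for r in range(4)] == [4, 5, 6, 7]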
- if args.gradient_accumulation_steps > 1: - loss = loss / args.gradient_accumulation_steps + shared_file_list = {} - if args.fp16: - # optimizer.backward(loss) - with amp.scale_loss(loss, optimizer) as scaled_loss: - scaled_loss.backward() - else: - loss.backward() - tr_loss += loss - average_loss += loss.item() + if torch.distributed.is_initialized() and torch.distributed.get_world_size() > num_files: + remainder = torch.distributed.get_world_size() % num_files + data_file = files[(f_start_id*torch.distributed.get_world_size()+torch.distributed.get_rank() + remainder*f_start_id)%num_files] + else: + data_file = files[(f_start_id*torch.distributed.get_world_size()+torch.distributed.get_rank())%num_files] - if training_steps % args.gradient_accumulation_steps == 0: - if args.fp16: - scheduler.step() - optimizer.step() - optimizer.zero_grad() - global_step += 1 + previous_file = data_file + + train_data = pretraining_dataset(data_file, args.max_predictions_per_seq) + train_sampler = RandomSampler(train_data) + train_dataloader = DataLoader(train_data, sampler=train_sampler, + batch_size=args.train_batch_size * args.n_gpu, num_workers=4, + pin_memory=True) + # shared_file_list["0"] = (train_dataloader, data_file) + + overflow_buf = None + if args.allreduce_post_accumulation: + overflow_buf = torch.cuda.IntTensor([0]) + + for f_id in range(f_start_id + 1 , len(files)): - + # torch.cuda.synchronize() + # f_start = time.time() + if torch.distributed.get_world_size() > num_files: + data_file = files[(f_id*torch.distributed.get_world_size()+torch.distributed.get_rank() + remainder*f_id)%num_files] + else: + data_file = files[(f_id*torch.distributed.get_world_size()+torch.distributed.get_rank())%num_files] - if training_steps == 1 * args.gradient_accumulation_steps: - logger.info("Step:{} Average Loss = {} Step Loss = {} LR {}".format(global_step, average_loss, - loss.item(), optimizer.param_groups[0]['lr'])) + logger.info("file no %s file %s" % (f_id, previous_file)) - if training_steps % (args.log_freq * args.gradient_accumulation_steps) == 0: - logger.info("Step:{} Average Loss = {} Step Loss = {} LR {}".format(global_step, average_loss / args.log_freq, - loss.item(), optimizer.param_groups[0]['lr'])) - average_loss = 0 + previous_file = data_file - if global_step >= args.max_steps or training_steps % ( - args.num_steps_per_checkpoint * args.gradient_accumulation_steps) == 0: + # train_dataloader = shared_file_list["0"][0] - if (not torch.distributed.is_initialized() or (torch.distributed.is_initialized() and torch.distributed.get_rank() == 0)): - # Save a trained model - logger.info("** ** * Saving fine - tuned model ** ** * ") - model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self - output_save_file = os.path.join(args.output_dir, "ckpt_{}.pt".format(global_step)) - - torch.save({'model' : model_to_save.state_dict(), - 'optimizer' : optimizer.state_dict(), - 'files' : [f_id] + files }, output_save_file) - - most_recent_ckpts_paths.append(output_save_file) - if len(most_recent_ckpts_paths) > 3: - ckpt_to_be_removed = most_recent_ckpts_paths.pop(0) - os.remove(ckpt_to_be_removed) + # thread = multiprocessing.Process( + # name="LOAD DATA:" + str(f_id) + ":" + str(data_file), + # target=create_pretraining_dataset, + # args=(data_file, args.max_predictions_per_seq, shared_file_list, args, n_gpu) + # ) + # thread.start() + dataset_future = pool.submit(create_pretraining_dataset, data_file, args.max_predictions_per_seq, shared_file_list, args) + # 
torch.cuda.synchronize()
+            # f_end = time.time()
+            # print('[{}] : shard overhead {}'.format(torch.distributed.get_rank(), f_end - f_start))
+
+            train_iter = tqdm(train_dataloader, desc="Iteration") if is_main_process() else train_dataloader
+            for step, batch in enumerate(train_iter):
+                # torch.cuda.synchronize()
+                # iter_start = time.time()
+
+                training_steps += 1
+                batch = [t.to(device) for t in batch]
+                input_ids, segment_ids, input_mask, masked_lm_labels, next_sentence_labels = batch
+                loss = model(input_ids=input_ids, token_type_ids=segment_ids, attention_mask=input_mask,
+                             masked_lm_labels=masked_lm_labels, next_sentence_label=next_sentence_labels,
+                             checkpoint_activations=args.checkpoint_activations)
+                if args.n_gpu > 1:
+                    loss = loss.mean()  # mean() to average on multi-gpu.
+
+                divisor = args.gradient_accumulation_steps
+                if args.gradient_accumulation_steps > 1:
+                    if not args.allreduce_post_accumulation:
+                        # this division was merged into predivision
+                        loss = loss / args.gradient_accumulation_steps
+                        divisor = 1.0
+                if args.fp16:
+                    with amp.scale_loss(loss, optimizer, delay_overflow_check=args.allreduce_post_accumulation) as scaled_loss:
+                        scaled_loss.backward()
+                else:
+                    loss.backward()
+                average_loss += loss.item()
+
+                if training_steps % args.gradient_accumulation_steps == 0:
+                    global_step = take_optimizer_step(args, optimizer, model, overflow_buf, global_step)
 
                 if global_step >= args.max_steps:
-                    tr_loss = tr_loss * args.gradient_accumulation_steps / training_steps
+                        last_num_steps = global_step % args.log_freq
+                        last_num_steps = args.log_freq if last_num_steps == 0 else last_num_steps
+                        average_loss = torch.tensor(average_loss, dtype=torch.float32).cuda()
+                        average_loss = average_loss / (last_num_steps * divisor)
                     if (torch.distributed.is_initialized()):
-                        tr_loss /= torch.distributed.get_world_size()
-                        torch.distributed.all_reduce(tr_loss)
-                    logger.info("Total Steps:{} Final Loss = {}".format(training_steps, tr_loss.item()))
-                    return
-        del train_dataloader
-        del train_sampler
-        del train_data
-        #for obj in gc.get_objects():
-        #    if torch.is_tensor(obj) or (hasattr(obj, 'data') and torch.is_tensor(obj.data)):
-        #        del obj
+                            average_loss /= torch.distributed.get_world_size()
+                            torch.distributed.all_reduce(average_loss)
+                        if is_main_process():
+                            logger.info("Total Steps:{} Final Loss = {}".format(training_steps, average_loss.item()))
+                    elif training_steps % (args.log_freq * args.gradient_accumulation_steps) == 0:
+                        if is_main_process():
+                            print("Step:{} Average Loss = {} Step Loss = {} LR {}".format(
+                                global_step, average_loss / (args.log_freq * divisor),
+                                loss.item() * args.gradient_accumulation_steps / divisor,
+                                optimizer.param_groups[0]['lr']))
+                        average_loss = 0
 
-        torch.cuda.empty_cache()
-    epoch += 1
+                    if global_step >= args.max_steps or training_steps % (
+                            args.num_steps_per_checkpoint * args.gradient_accumulation_steps) == 0:
+                        if is_main_process():
+                            # Save a trained model
+                            logger.info("** ** * Saving fine-tuned model ** ** * ")
+                            model_to_save = model.module if hasattr(model,
+                                                                    'module') else model  # Only save the model itself
+                            if args.resume_step < 0 or not args.phase2:
+                                output_save_file = os.path.join(args.output_dir, "ckpt_{}.pt".format(global_step))
+                            else:
+                                output_save_file = os.path.join(args.output_dir, "ckpt_{}.pt".format(global_step + args.phase1_end_step))
+                            if args.do_train:
+                                torch.save({'model': model_to_save.state_dict(),
+                                            'optimizer': optimizer.state_dict(),
+                                            'master params': list(amp.master_params(optimizer)),
+                                            'files': [f_id] + files}, output_save_file)
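# A sketch with hypothetical numbers (not upstream code) of why the `divisor`
# bookkeeping above keeps the logged Average Loss comparable in both modes:
# with allreduce_post_accumulation, division by the accumulation steps is
# deferred to the allreduce predivision, so the logging divides instead.
A, log_freq, true_loss = 4, 10, 2.0
micro_losses = [true_loss] * (log_freq * A)                    # one entry per backward()
avg_pre = sum(l / A for l in micro_losses) / (log_freq * 1)    # loss pre-divided, divisor == 1
avg_post = sum(micro_losses) / (log_freq * A)                  # division deferred, divisor == A
assert avg_pre == avg_post == true_loss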
+
+                        most_recent_ckpts_paths.append(output_save_file)
+                        if len(most_recent_ckpts_paths) > 3:
+                            ckpt_to_be_removed = most_recent_ckpts_paths.pop(0)
+                            os.remove(ckpt_to_be_removed)
+
+                    if global_step >= args.max_steps:
+                        del train_dataloader
+                        # thread.join()
+                        return args
+
+                # torch.cuda.synchronize()
+                # iter_end = time.time()
+
+                # if torch.distributed.get_rank() == 0:
+                #     print('step {} : {}'.format(global_step, iter_end - iter_start))
+
+            del train_dataloader
+            # thread.join()
+            # Make sure pool has finished and switch train_dataloader
+            # NOTE: Will block until complete
+            train_dataloader, data_file = dataset_future.result(timeout=None)
+
+        epoch += 1
 
 if __name__ == "__main__":
-    main()
+    now = time.time()
+    args = main()
+    if is_main_process():
+        print("Total time taken {}".format(time.time() - now))
diff --git a/PyTorch/LanguageModeling/BERT/run_pretraining_inference.py b/PyTorch/LanguageModeling/BERT/run_pretraining_inference.py
index ecfde31f..a6dfa285 100644
--- a/PyTorch/LanguageModeling/BERT/run_pretraining_inference.py
+++ b/PyTorch/LanguageModeling/BERT/run_pretraining_inference.py
@@ -101,11 +101,15 @@ def main():
                         type=str,
                         required=False,
                         help="The BERT model config")
-    parser.add_argument("--ckpt_dir",
+    ckpt_group = parser.add_mutually_exclusive_group(required=True)
+    ckpt_group.add_argument("--ckpt_dir",
                         default=None,
                         type=str,
-                        required=True,
                         help="The ckpt directory, e.g. /results")
+    ckpt_group.add_argument("--ckpt_path",
+                        default=None,
+                        type=str,
+                        help="Path to the specific checkpoint")
 
     group = parser.add_mutually_exclusive_group(required=True)
     group.add_argument('--eval', dest='do_eval', action='store_true')
@@ -184,16 +188,21 @@ def main():
 
     # Prepare model
     config = BertConfig.from_json_file(args.config_file)
+    # Padding for divisibility by 8
+    if config.vocab_size % 8 != 0:
+        config.vocab_size += 8 - (config.vocab_size % 8)
     model = BertForPreTraining(config)
-
-    if args.ckpt_step == -1:
-        #retrieve latest model
-        model_names = [f for f in os.listdir(args.ckpt_dir) if f.endswith(".model")]
-        args.ckpt_step = max([int(x.split('.model')[0].split('_')[1].strip()) for x in model_names])
-        print("load model saved at iteraton", args.ckpt_step)
-    model_file = os.path.join(args.ckpt_dir, "ckpt_" + str(args.ckpt_step) + ".model")
-    state_dict = torch.load(model_file, map_location="cpu")
+    if args.ckpt_dir:
+        if args.ckpt_step == -1:
+            # retrieve latest checkpoint
+            model_names = [f for f in os.listdir(args.ckpt_dir) if f.endswith(".pt")]
+            args.ckpt_step = max([int(x.split('.pt')[0].split('_')[1].strip()) for x in model_names])
+            print("load model saved at iteration", args.ckpt_step)
+        model_file = os.path.join(args.ckpt_dir, "ckpt_" + str(args.ckpt_step) + ".pt")
+    else:
+        model_file = args.ckpt_path
+    state_dict = torch.load(model_file, map_location="cpu")["model"]
 
     model.load_state_dict(state_dict, strict=False)
     if args.fp16:
diff --git a/PyTorch/LanguageModeling/BERT/run_squad.py b/PyTorch/LanguageModeling/BERT/run_squad.py
index 0066caab..8f3ba8b4 100644
--- a/PyTorch/LanguageModeling/BERT/run_squad.py
+++ b/PyTorch/LanguageModeling/BERT/run_squad.py
@@ -916,11 +916,16 @@ def main():
 
     # Prepare model
     config = BertConfig.from_json_file(args.config_file)
+    # Padding for divisibility by 8
+    if config.vocab_size % 8 != 0:
+        config.vocab_size += 8 - (config.vocab_size % 8)
+
     model = BertForQuestionAnswering(config)
     # model = BertForQuestionAnswering.from_pretrained(args.bert_model,
     #             cache_dir=os.path.join(str(PYTORCH_PRETRAINED_BERT_CACHE), 'distributed_{}'.format(args.local_rank)))
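# An illustrative standalone rendering (not upstream code) of the "divisibility by 8"
# padding repeated in the hunks above: the vocabulary is rounded up so the embedding
# and output-projection GEMM dimensions stay multiples of 8, the shape fp16 Tensor
# Cores handle most efficiently.
def pad_vocab_size(vocab_size, multiple=8):
    if vocab_size % multiple != 0:
        vocab_size += multiple - (vocab_size % multiple)
    return vocab_size

assert pad_vocab_size(30522) == 30528   # BERT's vocab gains 6 padding entries
assert pad_vocab_size(30528) == 30528   # already aligned: unchanged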
-    model.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'), strict=False)
-
+    print("USING CHECKPOINT")
+    model.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu')["model"], strict=False)
+    print("USED CHECKPOINT\n")
     model.to(device)
     if args.fp16 and args.old:
         model.half()
diff --git a/PyTorch/LanguageModeling/BERT/scripts/docker/build.sh b/PyTorch/LanguageModeling/BERT/scripts/docker/build.sh
index c365f7ab..472ac1fe 100644
--- a/PyTorch/LanguageModeling/BERT/scripts/docker/build.sh
+++ b/PyTorch/LanguageModeling/BERT/scripts/docker/build.sh
@@ -1,3 +1,2 @@
 #!/bin/bash
-
-docker build . --rm -t bert
+docker build . --rm -t bert_pyt
diff --git a/PyTorch/LanguageModeling/BERT/scripts/docker/launch.sh b/PyTorch/LanguageModeling/BERT/scripts/docker/launch.sh
index 40a459b9..8065632b 100644
--- a/PyTorch/LanguageModeling/BERT/scripts/docker/launch.sh
+++ b/PyTorch/LanguageModeling/BERT/scripts/docker/launch.sh
@@ -1,17 +1,18 @@
 #!/bin/bash
 
-DATA_DIR=${1:-"/mnt/dldata/bert"}
-VOCAB_DIR=${2:-"/mnt/dldata/bert/vocab"}
-CHECKPOINT_DIR=${3:-"/mnt/dldata/bert/pretrained_models_nvidia_pytorch"}
+DATA_DIR=${1:-"${PWD}/data/hdf5/books_wiki_en_corpus"}
+VOCAB_DIR=${2:-"${PWD}/data/download/google_pretrained_weights/uncased_L-24_H-1024_A-16"}
+CHECKPOINT_DIR=${3:-"${PWD}/checkpoints"}
+RESULTS_DIR=${4:-"${PWD}/results"}
 
 docker run -it --rm \
   --runtime=nvidia \
-  -p 8888:8888 \
   --shm-size=1g \
   --ulimit memlock=-1 \
   --ulimit stack=67108864 \
-  -v $DATA_DIR:/workspace/bert/data \
+  -v ${PWD}:/workspace/bert \
+  -v $DATA_DIR:/workspace/bert/data/hdf5/books_wiki_en_corpus \
   -v $CHECKPOINT_DIR:/workspace/checkpoints \
-  -v $VOCAB_DIR:/workspace/bert/vocab \
-  -v $PWD/results:/results \
-  bert bash
+  -v $VOCAB_DIR:/workspace/bert/data/download/google_pretrained_weights/uncased_L-24_H-1024_A-16 \
+  -v $RESULTS_DIR:/results \
+  bert_pyt bash
diff --git a/PyTorch/LanguageModeling/BERT/scripts/run_pretraining.sh b/PyTorch/LanguageModeling/BERT/scripts/run_pretraining.sh
index 17739757..2148de3b 100644
--- a/PyTorch/LanguageModeling/BERT/scripts/run_pretraining.sh
+++ b/PyTorch/LanguageModeling/BERT/scripts/run_pretraining.sh
@@ -1,28 +1,38 @@
 #!/bin/bash
 
 echo "Container nvidia build = " $NVIDIA_BUILD_ID
 
-train_batch_size=${1:-14}
-learning_rate=${2:-"0.4375e-4"}
+train_batch_size=${1:-8192}
+learning_rate=${2:-"6e-3"}
 precision=${3:-"fp16"}
 num_gpus=${4:-8}
-warmup_proportion=${5:-"0.01"}
-train_steps=${6:-2285714}
-save_checkpoint_steps=${7:-2000}
+warmup_proportion=${5:-"0.2843"}
+train_steps=${6:-7038}
+save_checkpoint_steps=${7:-200}
 resume_training=${8:-"false"}
 create_logfile=${9:-"true"}
-accumulate_gradients=${10:-"false"}
-gradient_accumulation_steps=${11:-1}
-seed=${12:-42}
-job_name=${13:-"job"}
+accumulate_gradients=${10:-"true"}
+gradient_accumulation_steps=${11:-128}
+seed=${12:-$RANDOM}
+job_name=${13:-"bert_lamb_pretraining"}
+allreduce_post_accumulation=${14:-"true"}
+allreduce_post_accumulation_fp16=${15:-"true"}
+accumulate_into_fp16=${16:-"true"}
+train_batch_size_phase2=${1:-4096}
+learning_rate_phase2=${2:-"4e-3"}
+warmup_proportion_phase2=${5:-"0.128"}
+train_steps_phase2=${6:-1563}
+gradient_accumulation_steps_phase2=${11:-512}
 
-DATASET=wikipedia_corpus # change this for other datasets
+DATASET=books_wiki_en_corpus # change this for other datasets
 
-DATA_DIR=data/${DATASET}/hdf5_shards/
+DATA_DIR=data/${DATASET}/training/
+#DATA_DIR=data/hdf5/wiki+book/bert_pytorch_wikipedia_bookcorpus_interseqmix_seq_128_pred_20/
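# Illustrative sketch (values are the phase1 defaults above, shown for orientation):
# run_pretraining.py divides train_batch_size by gradient_accumulation_steps in
# setup_training, so the defaults compose into these effective batch sizes.
_tbs=8192; _gas=128; _ngpu=8
echo "phase1 micro-batch per GPU: $((_tbs / _gas)) sequences"    # 64 per fwd/bwd pass
echo "phase1 global batch: $((_tbs * _ngpu)) sequences"          # 65536 per optimizer step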
BERT_CONFIG=bert_config.json RESULTS_DIR=/results CHECKPOINTS_DIR=/results/checkpoints + mkdir -p $CHECKPOINTS_DIR @@ -63,6 +73,21 @@ if [ "$resume_training" == "true" ] ; then CHECKPOINT="--resume_from_checkpoint" fi +ALL_REDUCE_POST_ACCUMULATION="" +if [ "$allreduce_post_accumulation" == "true" ] ; then + ALL_REDUCE_POST_ACCUMULATION="--allreduce_post_accumulation" +fi + +ALL_REDUCE_POST_ACCUMULATION_FP16="" +if [ "$allreduce_post_accumulation_fp16" == "true" ] ; then + ALL_REDUCE_POST_ACCUMULATION_FP16="--allreduce_post_accumulation_fp16" +fi + +ACCUMULATE_INTO_FP16="" +if [ "$accumulate_into_fp16" == "true" ] ; then + ACCUMULATE_INTO_FP16="--accumulate_into_fp16" +fi + echo $DATA_DIR INPUT_DIR=$DATA_DIR CMD=" /workspace/bert/run_pretraining.py" @@ -71,8 +96,8 @@ CMD+=" --output_dir=$CHECKPOINTS_DIR" CMD+=" --config_file=$BERT_CONFIG" CMD+=" --bert_model=bert-large-uncased" CMD+=" --train_batch_size=$train_batch_size" -CMD+=" --max_seq_length=512" -CMD+=" --max_predictions_per_seq=80" +CMD+=" --max_seq_length=128" +CMD+=" --max_predictions_per_seq=20" CMD+=" --max_steps=$train_steps" CMD+=" --warmup_proportion=$warmup_proportion" CMD+=" --num_steps_per_checkpoint=$save_checkpoint_steps" @@ -81,7 +106,10 @@ CMD+=" --seed=$seed" CMD+=" $PREC" CMD+=" $ACCUMULATE_GRADIENTS" CMD+=" $CHECKPOINT" - +CMD+=" $ALL_REDUCE_POST_ACCUMULATION" +CMD+=" $ALL_REDUCE_POST_ACCUMULATION_FP16" +CMD+=" $ACCUMULATE_INTO_FP16" +CMD+=" --do_train" if [ "$num_gpus" -gt 1 ] ; then CMD="python3 -m torch.distributed.launch --nproc_per_node=$num_gpus $CMD" @@ -115,39 +143,107 @@ target_loss=15 THROUGHPUT=10 THRESHOLD=0.9 -throughput=`cat $LOGFILE | grep Iteration | tail -1 | awk -F's/it' '{print $1}' | awk -F',' '{print $2}' | egrep -o [0-9.]+` +throughput=`cat $LOGFILE | grep Iteration | tail -1 | awk -F'it/s' '{print $1}' | awk -F',' '{print $2}' | egrep -o [0-9.]+` loss=`cat $LOGFILE | grep 'Average Loss' | tail -1 | awk -F'Average Loss =' '{print $2}' | awk -F' ' '{print $1}' | egrep -o [0-9.]+` final_loss=`cat $LOGFILE | grep 'Total Steps' | tail -1 | awk -F'Final Loss =' '{print $2}' | awk -F' ' '{print $1}' | egrep -o [0-9.]+` -echo "throughput: $throughput s/it" +train_perf=$(awk 'BEGIN {print ('$throughput' * '$num_gpus' * '$train_batch_size')}') +echo " training throughput phase1: $train_perf sequences/second" echo "average loss: $loss" echo "final loss: $final_loss" -ACCURACY_TEST_RESULT=$(awk 'BEGIN {print ('${loss}' <= '${target_loss}')}') +#Start Phase2 -if [ $ACCURACY_TEST_RESULT == 1 ]; - then - echo "&&&& ACCURACY TEST PASSED" - else - echo "&&&& ACCURACY TEST FAILED" - fi +DATASET=merged_wiki+books_phase2 # change this for other datasets -PERFORMANCE_TEST_RESULT=$(awk 'BEGIN {print ('${throughput}' <= ('${THROUGHPUT}' * '${THRESHOLD}'))}') +DATA_DIR=data/${DATASET}/hdf5_shards/ +#DATA_DIR=data/hdf5/wiki+book/bert_pytorch_wikipedia_bookcorpus_interseqmix_seq_512_pred_80/ -if [ $PERFORMANCE_TEST_RESULT == 1 ]; - then - echo "&&&& PERFORMANCE TEST PASSED" - else - echo "&&&& PERFORMANCE TEST FAILED" - fi +PREC="" +if [ "$precision" = "fp16" ] ; then + PREC="--fp16" +elif [ "$precision" = "fp32" ] ; then + PREC="" +else + echo "Unknown argument" + exit -2 +fi -if [ $ACCURACY_TEST_RESULT == 1 -a $PERFORMANCE_TEST_RESULT == 1 ]; - then - echo "&&&& PASSED" - exit 0 - else - echo "&&&& FAILED" - exit 1 - fi +ACCUMULATE_GRADIENTS="" +if [ "$accumulate_gradients" == "true" ] ; then + ACCUMULATE_GRADIENTS="--gradient_accumulation_steps=$gradient_accumulation_steps_phase2" +fi + 
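# Illustrative sketch (defaults from this script, assuming warmup_proportion is a
# fraction of max_steps as in BertLAMB): the proportions map to concrete warmup steps.
awk 'BEGIN { printf "phase1 warmup: ~%d of 7038 steps; phase2 warmup: ~%d of 1563 steps\n",
             7038 * 0.2843, 1563 * 0.128 }'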
+ALL_REDUCE_POST_ACCUMULATION="" +if [ "$allreduce_post_accumulation" == "true" ] ; then + ALL_REDUCE_POST_ACCUMULATION="--allreduce_post_accumulation" +fi + +ALL_REDUCE_POST_ACCUMULATION_FP16="" +if [ "$allreduce_post_accumulation_fp16" == "true" ] ; then + ALL_REDUCE_POST_ACCUMULATION_FP16="--allreduce_post_accumulation_fp16" +fi + +ACCUMULATE_INTO_FP16="" +if [ "$accumulate_into_fp16" == "true" ] ; then + ACCUMULATE_INTO_FP16="--accumulate_into_fp16" +fi + +echo $DATA_DIR +INPUT_DIR=$DATA_DIR +CMD=" /workspace/bert/run_pretraining.py" +CMD+=" --input_dir=$DATA_DIR" +CMD+=" --output_dir=$CHECKPOINTS_DIR" +CMD+=" --config_file=$BERT_CONFIG" +CMD+=" --bert_model=bert-large-uncased" +CMD+=" --train_batch_size=$train_batch_size_phase2" +CMD+=" --max_seq_length=512" +CMD+=" --max_predictions_per_seq=80" +CMD+=" --max_steps=$train_steps_phase2" +CMD+=" --warmup_proportion=$warmup_proportion_phase2" +CMD+=" --num_steps_per_checkpoint=$save_checkpoint_steps" +CMD+=" --learning_rate=$learning_rate_phase2" +CMD+=" --seed=$seed" +CMD+=" $PREC" +CMD+=" $ACCUMULATE_GRADIENTS" +CMD+=" $CHECKPOINT" +CMD+=" $ALL_REDUCE_POST_ACCUMULATION" +CMD+=" $ALL_REDUCE_POST_ACCUMULATION_FP16" +CMD+=" $ACCUMULATE_INTO_FP16" +CMD+=" --do_train --phase2 --resume_from_checkpoint --phase1_end_step=$train_steps" + +if [ "$num_gpus" -gt 1 ] ; then + CMD="python3 -m torch.distributed.launch --nproc_per_node=$num_gpus $CMD" +else + CMD="python3 $CMD" +fi +if [ "$create_logfile" = "true" ] ; then + export GBS=$(expr $train_batch_size \* $num_gpus) + printf -v TAG "pyt_bert_pretraining_%s_gbs%d" "$precision" $GBS + DATESTAMP=`date +'%y%m%d%H%M%S'` + LOGFILE=$RESULTS_DIR/$job_name.$TAG.$DATESTAMP.log + printf "Logs written to %s\n" "$LOGFILE" +fi + +set -x +if [ -z "$LOGFILE" ] ; then + $CMD +else + ( + $CMD + ) |& tee $LOGFILE +fi + +set +x + +echo "finished phase2" +throughput=`cat $LOGFILE | grep Iteration | tail -1 | awk -F'it/s' '{print $1}' | awk -F',' '{print $2}' | egrep -o [0-9.]+` +loss=`cat $LOGFILE | grep 'Average Loss' | tail -1 | awk -F'Average Loss =' '{print $2}' | awk -F' ' '{print $1}' | egrep -o [0-9.]+` +final_loss=`cat $LOGFILE | grep 'Total Steps' | tail -1 | awk -F'Final Loss =' '{print $2}' | awk -F' ' '{print $1}' | egrep -o [0-9.]+` + +train_perf=$(awk 'BEGIN {print ('$throughput' * '$num_gpus' * '$train_batch_size_phase2')}') +echo " training throughput phase2: $train_perf sequences/second" +echo "average loss: $loss" +echo "final loss: $final_loss" diff --git a/PyTorch/LanguageModeling/BERT/scripts/run_pretraining_inference.sh b/PyTorch/LanguageModeling/BERT/scripts/run_pretraining_inference.sh index 5e7519c4..7e98c756 100644 --- a/PyTorch/LanguageModeling/BERT/scripts/run_pretraining_inference.sh +++ b/PyTorch/LanguageModeling/BERT/scripts/run_pretraining_inference.sh @@ -96,51 +96,6 @@ else fi set +x -target_loss=15 -THROUGHPUT=1.0 -THRESHOLD=0.9 - throughput=`cat $LOGFILE | grep Iteration | tail -1 | awk -F'it/s' '{print $1}' | awk -F',' '{print $2}' | egrep -o [0-9.]+` - - -echo "throughput: $throughput it/s" - - -PERFORMANCE_TEST_RESULT=$(awk 'BEGIN {print ('${throughput}' >= \ - ('${THROUGHPUT}' * '${THRESHOLD}'))}') - -if [ $PERFORMANCE_TEST_RESULT == 1 ]; - then - echo "&&&& PERFORMANCE TEST PASSED" - else - echo "&&&& PERFORMANCE TEST FAILED" - fi - - -if [ "$inference_mode" = "eval" ] ; then - loss=`cat $LOGFILE | grep Finished | tail -1 | awk -F'Final Loss =' '{print $2}' | awk -F' ' '{print $1}' | egrep -o [0-9.]+` - - - echo "final loss: $loss" - - - ACCURACY_TEST_RESULT=$(awk 'BEGIN 
{print ('${loss}' <= '${target_loss}')}') - - if [ $ACCURACY_TEST_RESULT == 1 ]; - then - echo "&&&& ACCURACY TEST PASSED" - else - echo "&&&& ACCURACY TEST FAILED" - fi - - - if [ $ACCURACY_TEST_RESULT == 1 -a $PERFORMANCE_TEST_RESULT == 1 ]; - then - echo "&&&& PASSED" - exit 0 - else - echo "&&&& FAILED" - exit 1 - fi -fi - +inference_perf=$(awk 'BEGIN {print ('$throughput' * '$num_gpus' * '$eval_batch_size')}') +echo " inference throughput : $inference_perf sequences/second" \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/scripts/run_squad.sh b/PyTorch/LanguageModeling/BERT/scripts/run_squad.sh index db52e3a2..23c316ea 100755 --- a/PyTorch/LanguageModeling/BERT/scripts/run_squad.sh +++ b/PyTorch/LanguageModeling/BERT/scripts/run_squad.sh @@ -13,7 +13,7 @@ precision=${5:-"fp16"} num_gpu=${6:-"8"} seed=${7:-"1"} squad_dir=${8:-"/workspace/bert/data/squad/v1.1"} -vocab_file=${9:-"/workspace/bert/vocab/vocab"} +vocab_file=${9:-"/workspace/bert/data/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt"} OUT_DIR=${10:-"/results/SQuAD"} mode=${11:-"train eval"} CONFIG_FILE=${12:-"/workspace/bert/bert_config.json"} diff --git a/PyTorch/LanguageModeling/BERT/scripts/start_pretraining.sh b/PyTorch/LanguageModeling/BERT/scripts/start_pretraining.sh index 7732ae90..a3155bfe 100644 --- a/PyTorch/LanguageModeling/BERT/scripts/start_pretraining.sh +++ b/PyTorch/LanguageModeling/BERT/scripts/start_pretraining.sh @@ -45,7 +45,7 @@ fi printf -v EXTRA_PARAMS "%d %d %e %s 1 %d %d %d false" $train_batch_size $eval_batch_size $learning_rate "$precision" $warmup_proportion $train_steps $save_checkpoint_steps export ROOTDIR=$root_dir -export DATA_DIR=${DATA_DIR:-$CODEDIR/data/wikipedia_corpus/pyt_hdf5_shards} +export DATA_DIR=${DATA_DIR:-$CODEDIR/data/hdf5/books_wiki_en_corpus} VOLS="-v $ROOTDIR:/workspace/bert" VOLS+=" -v $DATA_DIR:/workspace/bert/data/wikipedia_corpus/pyt_hdf5_shards" diff --git a/PyTorch/LanguageModeling/BERT/utils.py b/PyTorch/LanguageModeling/BERT/utils.py new file mode 100644 index 00000000..4fae2889 --- /dev/null +++ b/PyTorch/LanguageModeling/BERT/utils.py @@ -0,0 +1,12 @@ +import torch +import torch.distributed as dist + +def get_rank(): + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + return dist.get_rank() + +def is_main_process(): + return get_rank() == 0 \ No newline at end of file diff --git a/PyTorch/LanguageModeling/BERT/vocab/download_models.py b/PyTorch/LanguageModeling/BERT/vocab/download_models.py deleted file mode 100644 index e671c194..00000000 --- a/PyTorch/LanguageModeling/BERT/vocab/download_models.py +++ /dev/null @@ -1,123 +0,0 @@ -# NVIDIA - -import hashlib -import urllib.request -import zipfile - -# Download urls -model_urls = { - 'bert_base_uncased' : ('https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip', 'uncased_L-12_H-768_A-12.zip'), - 'bert_large_uncased' : ('https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-24_H-1024_A-16.zip', 'uncased_L-24_H-1024_A-16.zip'), - 'bert_base_cased' : ('https://storage.googleapis.com/bert_models/2018_10_18/cased_L-12_H-768_A-12.zip', 'cased_L-12_H-768_A-12.zip'), - 'bert_large_cased' : ('https://storage.googleapis.com/bert_models/2018_10_18/cased_L-24_H-1024_A-16.zip', 'cased_L-24_H-1024_A-16.zip'), - 'bert_base_multilingual_cased' : ('https://storage.googleapis.com/bert_models/2018_11_23/multi_cased_L-12_H-768_A-12.zip', 'multi_cased_L-12_H-768_A-12.zip'), - 'bert_large_multilingual_uncased' : 
('https://storage.googleapis.com/bert_models/2018_11_03/multilingual_L-12_H-768_A-12.zip', 'multilingual_L-12_H-768_A-12.zip'), - 'bert_base_chinese' : ('https://storage.googleapis.com/bert_models/2018_11_03/chinese_L-12_H-768_A-12.zip', 'chinese_L-12_H-768_A-12.zip') -} - -# SHA256sum verification for file download integrity (and checking for changes from the download source over time) -bert_base_uncased_sha = { - 'bert_config.json' : '7b4e5f53efbd058c67cda0aacfafb340113ea1b5797d9ce6ee411704ba21fcbc', - 'bert_model.ckpt.data-00000-of-00001' : '58580dc5e0bf0ae0d2efd51d0e8272b2f808857f0a43a88aaf7549da6d7a8a84', - 'bert_model.ckpt.index' : '04c1323086e2f1c5b7c0759d8d3e484afbb0ab45f51793daab9f647113a0117b', - 'bert_model.ckpt.meta' : 'dd5682170a10c3ea0280c2e9b9a45fee894eb62da649bbdea37b38b0ded5f60e', - 'vocab.txt' : '07eced375cec144d27c900241f3e339478dec958f92fddbc551f295c992038a3', -} - -bert_large_uncased_sha = { - 'bert_config.json' : 'bfa42236d269e2aeb3a6d30412a33d15dbe8ea597e2b01dc9518c63cc6efafcb', - 'bert_model.ckpt.data-00000-of-00001' : 'bc6b3363e3be458c99ecf64b7f472d2b7c67534fd8f564c0556a678f90f4eea1', - 'bert_model.ckpt.index' : '68b52f2205ffc64dc627d1120cf399c1ef1cbc35ea5021d1afc889ffe2ce2093', - 'bert_model.ckpt.meta' : '6fcce8ff7628f229a885a593625e3d5ff9687542d5ef128d9beb1b0c05edc4a1', - 'vocab.txt' : '07eced375cec144d27c900241f3e339478dec958f92fddbc551f295c992038a3', -} - -bert_base_cased_sha = { - 'bert_config.json' : 'f11dfb757bea16339a33e1bf327b0aade6e57fd9c29dc6b84f7ddb20682f48bc', - 'bert_model.ckpt.data-00000-of-00001' : '734d5a1b68bf98d4e9cb6b6692725d00842a1937af73902e51776905d8f760ea', - 'bert_model.ckpt.index' : '517d6ef5c41fc2ca1f595276d6fccf5521810d57f5a74e32616151557790f7b1', - 'bert_model.ckpt.meta' : '5f8a9771ff25dadd61582abb4e3a748215a10a6b55947cbb66d0f0ba1694be98', - 'vocab.txt' : 'eeaa9875b23b04b4c54ef759d03db9d1ba1554838f8fb26c5d96fa551df93d02', -} - -bert_large_cased_sha = { - 'bert_config.json' : '7adb2125c8225da495656c982fd1c5f64ba8f20ad020838571a3f8a954c2df57', - 'bert_model.ckpt.data-00000-of-00001' : '6ff33640f40d472f7a16af0c17b1179ca9dcc0373155fb05335b6a4dd1657ef0', - 'bert_model.ckpt.index' : 'ef42a53f577fbe07381f4161b13c7cab4f4fc3b167cec6a9ae382c53d18049cf', - 'bert_model.ckpt.meta' : 'd2ddff3ed33b80091eac95171e94149736ea74eb645e575d942ec4a5e01a40a1', - 'vocab.txt' : 'eeaa9875b23b04b4c54ef759d03db9d1ba1554838f8fb26c5d96fa551df93d02', -} - -bert_base_multilingual_cased_sha = { - 'bert_config.json' : 'e76c3964bc14a8bb37a5530cdc802699d2f4a6fddfab0611e153aa2528f234f0', - 'bert_model.ckpt.data-00000-of-00001' : '55b8a2df41f69c60c5180e50a7c31b7cdf6238909390c4ddf05fbc0d37aa1ac5', - 'bert_model.ckpt.index' : '7d8509c2a62b4e300feb55f8e5f1eef41638f4998dd4d887736f42d4f6a34b37', - 'bert_model.ckpt.meta' : '95e5f1997e8831f1c31e5cf530f1a2e99f121e9cd20887f2dce6fe9e3343e3fa', - 'vocab.txt' : 'fe0fda7c425b48c516fc8f160d594c8022a0808447475c1a7c6d6479763f310c', -} - -bert_large_multilingual_uncased_sha = { - 'bert_config.json' : '49063bb061390211d2fdd108cada1ed86faa5f90b80c8f6fdddf406afa4c4624', - 'bert_model.ckpt.data-00000-of-00001' : '3cd83912ebeb0efe2abf35c9f1d5a515d8e80295e61c49b75c8853f756658429', - 'bert_model.ckpt.index' : '87c372c1a3b1dc7effaaa9103c80a81b3cbab04c7933ced224eec3b8ad2cc8e7', - 'bert_model.ckpt.meta' : '27f504f34f02acaa6b0f60d65195ec3e3f9505ac14601c6a32b421d0c8413a29', - 'vocab.txt' : '87b44292b452f6c05afa49b2e488e7eedf79ea4f4c39db6f2f4b37764228ef3f', -} - -bert_base_chinese_sha = { - 'bert_config.json' : 
'7aaad0335058e2640bcb2c2e9a932b1cd9da200c46ea7b8957d54431f201c015', - 'bert_model.ckpt.data-00000-of-00001' : '756699356b78ad0ef1ca9ba6528297bcb3dd1aef5feadd31f4775d7c7fc989ba', - 'bert_model.ckpt.index' : '46315546e05ce62327b3e2cd1bed22836adcb2ff29735ec87721396edb21b82e', - 'bert_model.ckpt.meta' : 'c0f8d51e1ab986604bc2b25d6ec0af7fd21ff94cf67081996ec3f3bf5d823047', - 'vocab.txt' : '45bbac6b341c319adc98a532532882e91a9cefc0329aa57bac9ae761c27b291c', -} - -# Relate SHA to urls for loop below -model_sha = { - 'bert_base_uncased' : bert_base_uncased_sha, - 'bert_large_uncased' : bert_large_uncased_sha, - 'bert_base_cased' : bert_base_cased_sha, - 'bert_large_cased' : bert_large_cased_sha, - 'bert_base_multilingual_cased' : bert_base_multilingual_cased_sha, - 'bert_large_multilingual_uncased' : bert_large_multilingual_uncased_sha, - 'bert_base_chinese' : bert_base_chinese_sha -} - -# Helper to get sha256sum of a file -def sha256sum(filename): - h = hashlib.sha256() - b = bytearray(128*1024) - mv = memoryview(b) - with open(filename, 'rb', buffering=0) as f: - for n in iter(lambda : f.readinto(mv), 0): - h.update(mv[:n]) - return h.hexdigest() - -# Iterate over urls: download, unzip, verify sha256sum -found_mismatch_sha = False -for model in model_urls: - url = model_urls[model][0] - file = model_urls[model][1] - - print("Downloading", url) - response = urllib.request.urlopen(url) - with open(file, "wb") as handle: - handle.write(response.read()) - - print("Unzipping", file) - zip = zipfile.ZipFile(file, 'r') - zip.extractall() - zip.close() - - sha_dict = model_sha[model] - for extracted_file in sha_dict: - sha = sha_dict[extracted_file] - if sha != sha256sum(file[:-4] + "/" + extracted_file): - found_mismatch_sha = True - print("SHA256sum does not match on file:", extracted_file, "from download url:", url) - else: - print(file[:-4] + "/" + extracted_file, "\t", "verified") - -if not found_mismatch_sha: - print("All downloads pass sha256sum verification.") - diff --git a/PyTorch/LanguageModeling/BERT/vocab/vocab b/PyTorch/LanguageModeling/BERT/vocab/vocab deleted file mode 100644 index fb140275..00000000 --- a/PyTorch/LanguageModeling/BERT/vocab/vocab +++ /dev/null @@ -1,30522 +0,0 @@ -[PAD] -[unused0] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[UNK] -[CLS] -[SEP] -[MASK] -[unused99] -[unused100] -[unused101] -[unused102] -[unused103] -[unused104] -[unused105] -[unused106] -[unused107] 
[... tens of thousands of single-token deletion lines elided: removal of the bert-base-uncased WordPiece vocabulary from PyTorch/LanguageModeling/BERT/vocab/vocab, covering the [unused108]–[unused993] placeholder tokens, ASCII punctuation and digits, single characters from Latin, Greek, Cyrillic, Armenian, Hebrew, Arabic, Devanagari, Bengali, Tamil, Thai, Tibetan, Georgian, Hangul, kana, and CJK scripts, followed by English whole-word and ##-prefixed subword entries ...]
-suburban -conducting -##tto -##tions -##tle -descended -massacre -##cal -ammunition -terrain -fork -souls -counts -chelsea -durham -drives -cab -##bank -perth -realizing -palestinian -finn -simpson -##dal -betty -##ule -moreover -particles -cardinals -tent -evaluation -extraordinary -##oid -inscription -##works -wednesday -chloe -maintains -panels -ashley -trucks -##nation -cluster -sunlight -strikes -zhang -##wing -dialect -canon -##ap -tucked -##ws -collecting -##mas -##can -##sville -maker -quoted -evan -franco -aria -buying -cleaning -eva -closet -provision -apollo -clinic -rat -##ez -necessarily -ac -##gle -##ising -venues -flipped -cent -spreading -trustees -checking -authorized -##sco -disappointed -##ado -notion -duration -trumpet -hesitated -topped -brussels -rolls -theoretical -hint -define -aggressive -repeat -wash -peaceful -optical -width -allegedly -mcdonald -strict -copyright -##illa -investors -mar -jam -witnesses -sounding -miranda -michelle -privacy -hugo -harmony -##pp -valid -lynn -glared -nina -102 -headquartered -diving -boarding -gibson -##ncy -albanian -marsh -routine -dealt -enhanced -er -intelligent -substance -targeted -enlisted -discovers -spinning -observations -pissed -smoking -rebecca -capitol -visa -varied -costume -seemingly -indies -compensation -surgeon -thursday -arsenal -westminster -suburbs -rid -anglican -##ridge -knots -foods -alumni -lighter -fraser -whoever -portal -scandal -##ray -gavin -advised -instructor -flooding -terrorist -##ale -teenage -interim -senses -duck -teen -thesis -abby -eager -overcome -##ile -newport -glenn -rises -shame -##cc -prompted -priority -forgot -bomber -nicolas -protective -360 -cartoon -katherine -breeze -lonely -trusted -henderson -richardson -relax -banner -candy -palms -remarkable -##rio -legends -cricketer -essay -ordained -edmund -rifles -trigger -##uri -##away -sail -alert -1830 -audiences -penn -sussex -siblings -pursued -indianapolis -resist -rosa -consequence -succeed -avoided -1845 -##ulation -inland -##tie -##nna -counsel -profession -chronicle -hurried -##una -eyebrow -eventual -bleeding -innovative -cure -##dom -committees -accounting -con -scope -hardy -heather -tenor -gut -herald -codes -tore -scales -wagon -##oo -luxury -tin -prefer -fountain -triangle -bonds -darling -convoy -dried -traced -beings -troy -accidentally -slam -findings -smelled -joey -lawyers -outcome -steep -bosnia -configuration -shifting -toll -brook -performers -lobby -philosophical -construct -shrine -aggregate -boot -cox -phenomenon -savage -insane -solely -reynolds -lifestyle -##ima -nationally -holdings -consideration -enable -edgar -mo -mama -##tein -fights -relegation -chances -atomic -hub -conjunction -awkward -reactions -currency -finale -kumar -underwent -steering -elaborate -gifts -comprising -melissa -veins -reasonable -sunshine -chi -solve -trails -inhabited -elimination -ethics -huh -ana -molly -consent -apartments -layout -marines -##ces -hunters -bulk -##oma -hometown -##wall -##mont -cracked -reads -neighbouring -withdrawn -admission -wingspan -damned -anthology -lancashire -brands -batting -forgive -cuban -awful -##lyn -104 -dimensions -imagination -##ade -dante -##ship -tracking -desperately -goalkeeper -##yne -groaned -workshops -confident -burton -gerald -milton -circus -uncertain -slope -copenhagen -sophia -fog -philosopher -portraits -accent -cycling -varying -gripped -larvae -garrett -specified -scotia -mature -luther -kurt -rap -##kes -aerial -750 -ferdinand -heated -es -transported -##shan -safely 
-nonetheless -##orn -##gal -motors -demanding -##sburg -startled -##brook -ally -generate -caps -ghana -stained -demo -mentions -beds -ap -afterward -diary -##bling -utility -##iro -richards -1837 -conspiracy -conscious -shining -footsteps -observer -cyprus -urged -loyalty -developer -probability -olive -upgraded -gym -miracle -insects -graves -1844 -ourselves -hydrogen -amazon -katie -tickets -poets -##pm -planes -##pan -prevention -witnessed -dense -jin -randy -tang -warehouse -monroe -bang -archived -elderly -investigations -alec -granite -mineral -conflicts -controlling -aboriginal -carlo -##zu -mechanics -stan -stark -rhode -skirt -est -##berry -bombs -respected -##horn -imposed -limestone -deny -nominee -memphis -grabbing -disabled -##als -amusement -aa -frankfurt -corn -referendum -varies -slowed -disk -firms -unconscious -incredible -clue -sue -##zhou -twist -##cio -joins -idaho -chad -developers -computing -destroyer -103 -mortal -tucker -kingston -choices -yu -carson -1800 -os -whitney -geneva -pretend -dimension -staged -plateau -maya -##une -freestyle -##bc -rovers -hiv -##ids -tristan -classroom -prospect -##hus -honestly -diploma -lied -thermal -auxiliary -feast -unlikely -iata -##tel -morocco -pounding -treasury -lithuania -considerably -1841 -dish -1812 -geological -matching -stumbled -destroying -marched -brien -advances -cake -nicole -belle -settling -measuring -directing -##mie -tuesday -bassist -capabilities -stunned -fraud -torpedo -##list -##phone -anton -wisdom -surveillance -ruined -##ulate -lawsuit -healthcare -theorem -halls -trend -aka -horizontal -dozens -acquire -lasting -swim -hawk -gorgeous -fees -vicinity -decrease -adoption -tactics -##ography -pakistani -##ole -draws -##hall -willie -burke -heath -algorithm -integral -powder -elliott -brigadier -jackie -tate -varieties -darker -##cho -lately -cigarette -specimens -adds -##ree -##ensis -##inger -exploded -finalist -cia -murders -wilderness -arguments -nicknamed -acceptance -onwards -manufacture -robertson -jets -tampa -enterprises -blog -loudly -composers -nominations -1838 -ai -malta -inquiry -automobile -hosting -viii -rays -tilted -grief -museums -strategies -furious -euro -equality -cohen -poison -surrey -wireless -governed -ridiculous -moses -##esh -##room -vanished -##ito -barnes -attract -morrison -istanbul -##iness -absent -rotation -petition -janet -##logical -satisfaction -custody -deliberately -observatory -comedian -surfaces -pinyin -novelist -strictly -canterbury -oslo -monks -embrace -ibm -jealous -photograph -continent -dorothy -marina -doc -excess -holden -allegations -explaining -stack -avoiding -lance -storyline -majesty -poorly -spike -dos -bradford -raven -travis -classics -proven -voltage -pillow -fists -butt -1842 -interpreted -##car -1839 -gage -telegraph -lens -promising -expelled -casual -collector -zones -##min -silly -nintendo -##kh -##bra -downstairs -chef -suspicious -afl -flies -vacant -uganda -pregnancy -condemned -lutheran -estimates -cheap -decree -saxon -proximity -stripped -idiot -deposits -contrary -presenter -magnus -glacier -im -offense -edwin -##ori -upright -##long -bolt -##ois -toss -geographical -##izes -environments -delicate -marking -abstract -xavier -nails -windsor -plantation -occurring -equity -saskatchewan -fears -drifted -sequences -vegetation -revolt -##stic -1843 -sooner -fusion -opposing -nato -skating -1836 -secretly -ruin -lease -##oc -edit -##nne -flora -anxiety -ruby -##ological -##mia -tel -bout -taxi -emmy -frost -rainbow -compounds -foundations 
-rainfall -assassination -nightmare -dominican -##win -achievements -deserve -orlando -intact -armenia -##nte -calgary -valentine -106 -marion -proclaimed -theodore -bells -courtyard -thigh -gonzalez -console -troop -minimal -monte -everyday -##ence -##if -supporter -terrorism -buck -openly -presbyterian -activists -carpet -##iers -rubbing -uprising -##yi -cute -conceived -legally -##cht -millennium -cello -velocity -ji -rescued -cardiff -1835 -rex -concentrate -senators -beard -rendered -glowing -battalions -scouts -competitors -sculptor -catalogue -arctic -ion -raja -bicycle -wow -glancing -lawn -##woman -gentleman -lighthouse -publish -predicted -calculated -##val -variants -##gne -strain -##ui -winston -deceased -##nus -touchdowns -brady -caleb -sinking -echoed -crush -hon -blessed -protagonist -hayes -endangered -magnitude -editors -##tine -estimate -responsibilities -##mel -backup -laying -consumed -sealed -zurich -lovers -frustrated -##eau -ahmed -kicking -mit -treasurer -1832 -biblical -refuse -terrified -pump -agrees -genuine -imprisonment -refuses -plymouth -##hen -lou -##nen -tara -trembling -antarctic -ton -learns -##tas -crap -crucial -faction -atop -##borough -wrap -lancaster -odds -hopkins -erik -lyon -##eon -bros -##ode -snap -locality -tips -empress -crowned -cal -acclaimed -chuckled -##ory -clara -sends -mild -towel -##fl -##day -##а -wishing -assuming -interviewed -##bal -##die -interactions -eden -cups -helena -##lf -indie -beck -##fire -batteries -filipino -wizard -parted -##lam -traces -##born -rows -idol -albany -delegates -##ees -##sar -discussions -##ex -notre -instructed -belgrade -highways -suggestion -lauren -possess -orientation -alexandria -abdul -beats -salary -reunion -ludwig -alright -wagner -intimate -pockets -slovenia -hugged -brighton -merchants -cruel -stole -trek -slopes -repairs -enrollment -politically -underlying -promotional -counting -boeing -##bb -isabella -naming -##и -keen -bacteria -listing -separately -belfast -ussr -450 -lithuanian -anybody -ribs -sphere -martinez -cock -embarrassed -proposals -fragments -nationals -##fs -##wski -premises -fin -1500 -alpine -matched -freely -bounded -jace -sleeve -##af -gaming -pier -populated -evident -##like -frances -flooded -##dle -frightened -pour -trainer -framed -visitor -challenging -pig -wickets -##fold -infected -email -##pes -arose -##aw -reward -ecuador -oblast -vale -ch -shuttle -##usa -bach -rankings -forbidden -cornwall -accordance -salem -consumers -bruno -fantastic -toes -machinery -resolved -julius -remembering -propaganda -iceland -bombardment -tide -contacts -wives -##rah -concerto -macdonald -albania -implement -daisy -tapped -sudan -helmet -angela -mistress -##lic -crop -sunk -finest -##craft -hostile -##ute -##tsu -boxer -fr -paths -adjusted -habit -ballot -supervision -soprano -##zen -bullets -wicked -sunset -regiments -disappear -lamp -performs -app -##gia -##oa -rabbit -digging -incidents -entries -##cion -dishes -##oi -introducing -##ati -##fied -freshman -slot -jill -tackles -baroque -backs -##iest -lone -sponsor -destiny -altogether -convert -##aro -consensus -shapes -demonstration -basically -feminist -auction -artifacts -##bing -strongest -twitter -halifax -2019 -allmusic -mighty -smallest -precise -alexandra -viola -##los -##ille -manuscripts -##illo -dancers -ari -managers -monuments -blades -barracks -springfield -maiden -consolidated -electron -##end -berry -airing -wheat -nobel -inclusion -blair -payments -geography -bee -cc -eleanor -react -##hurst -afc -manitoba -##yu 
-su -lineup -fitness -recreational -investments -airborne -disappointment -##dis -edmonton -viewing -##row -renovation -##cast -infant -bankruptcy -roses -aftermath -pavilion -##yer -carpenter -withdrawal -ladder -##hy -discussing -popped -reliable -agreements -rochester -##abad -curves -bombers -220 -rao -reverend -decreased -choosing -107 -stiff -consulting -naples -crawford -tracy -ka -ribbon -cops -##lee -crushed -deciding -unified -teenager -accepting -flagship -explorer -poles -sanchez -inspection -revived -skilled -induced -exchanged -flee -locals -tragedy -swallow -loading -hanna -demonstrate -##ela -salvador -flown -contestants -civilization -##ines -wanna -rhodes -fletcher -hector -knocking -considers -##ough -nash -mechanisms -sensed -mentally -walt -unclear -##eus -renovated -madame -##cks -crews -governmental -##hin -undertaken -monkey -##ben -##ato -fatal -armored -copa -caves -governance -grasp -perception -certification -froze -damp -tugged -wyoming -##rg -##ero -newman -##lor -nerves -curiosity -graph -115 -##ami -withdraw -tunnels -dull -meredith -moss -exhibits -neighbors -communicate -accuracy -explored -raiders -republicans -secular -kat -superman -penny -criticised -##tch -freed -update -conviction -wade -ham -likewise -delegation -gotta -doll -promises -technological -myth -nationality -resolve -convent -##mark -sharon -dig -sip -coordinator -entrepreneur -fold -##dine -capability -councillor -synonym -blown -swan -cursed -1815 -jonas -haired -sofa -canvas -keeper -rivalry -##hart -rapper -speedway -swords -postal -maxwell -estonia -potter -recurring -##nn -##ave -errors -##oni -cognitive -1834 -##² -claws -nadu -roberto -bce -wrestler -ellie -##ations -infinite -ink -##tia -presumably -finite -staircase -108 -noel -patricia -nacional -##cation -chill -eternal -tu -preventing -prussia -fossil -limbs -##logist -ernst -frog -perez -rene -##ace -pizza -prussian -##ios -##vy -molecules -regulatory -answering -opinions -sworn -lengths -supposedly -hypothesis -upward -habitats -seating -ancestors -drank -yield -hd -synthesis -researcher -modest -##var -mothers -peered -voluntary -homeland -##the -acclaim -##igan -static -valve -luxembourg -alto -carroll -fe -receptor -norton -ambulance -##tian -johnston -catholics -depicting -jointly -elephant -gloria -mentor -badge -ahmad -distinguish -remarked -councils -precisely -allison -advancing -detection -crowded -##10 -cooperative -ankle -mercedes -dagger -surrendered -pollution -commit -subway -jeffrey -lesson -sculptures -provider -##fication -membrane -timothy -rectangular -fiscal -heating -teammate -basket -particle -anonymous -deployment -##ple -missiles -courthouse -proportion -shoe -sec -##ller -complaints -forbes -blacks -abandon -remind -sizes -overwhelming -autobiography -natalie -##awa -risks -contestant -countryside -babies -scorer -invaded -enclosed -proceed -hurling -disorders -##cu -reflecting -continuously -cruiser -graduates -freeway -investigated -ore -deserved -maid -blocking -phillip -jorge -shakes -dove -mann -variables -lacked -burden -accompanying -que -consistently -organizing -provisional -complained -endless -##rm -tubes -juice -georges -krishna -mick -labels -thriller -##uch -laps -arcade -sage -snail -##table -shannon -fi -laurence -seoul -vacation -presenting -hire -churchill -surprisingly -prohibited -savannah -technically -##oli -170 -##lessly -testimony -suited -speeds -toys -romans -mlb -flowering -measurement -talented -kay -settings -charleston -expectations -shattered -achieving -triumph 
-ceremonies -portsmouth -lanes -mandatory -loser -stretching -cologne -realizes -seventy -cornell -careers -webb -##ulating -americas -budapest -ava -suspicion -##ison -yo -conrad -##hai -sterling -jessie -rector -##az -1831 -transform -organize -loans -christine -volcanic -warrant -slender -summers -subfamily -newer -danced -dynamics -rhine -proceeds -heinrich -gastropod -commands -sings -facilitate -easter -ra -positioned -responses -expense -fruits -yanked -imported -25th -velvet -vic -primitive -tribune -baldwin -neighbourhood -donna -rip -hay -pr -##uro -1814 -espn -welcomed -##aria -qualifier -glare -highland -timing -##cted -shells -eased -geometry -louder -exciting -slovakia -##sion -##iz -##lot -savings -prairie -##ques -marching -rafael -tonnes -##lled -curtain -preceding -shy -heal -greene -worthy -##pot -detachment -bury -sherman -##eck -reinforced -seeks -bottles -contracted -duchess -outfit -walsh -##sc -mickey -##ase -geoffrey -archer -squeeze -dawson -eliminate -invention -##enberg -neal -##eth -stance -dealer -coral -maple -retire -polo -simplified -##ht -1833 -hid -watts -backwards -jules -##oke -genesis -mt -frames -rebounds -burma -woodland -moist -santos -whispers -drained -subspecies -##aa -streaming -ulster -burnt -correspondence -maternal -gerard -denis -stealing -##load -genius -duchy -##oria -inaugurated -momentum -suits -placement -sovereign -clause -thames -##hara -confederation -reservation -sketch -yankees -lets -rotten -charm -hal -verses -ultra -commercially -dot -salon -citation -adopt -winnipeg -mist -allocated -cairo -##boy -jenkins -interference -objectives -##wind -1820 -portfolio -armoured -sectors -##eh -initiatives -##world -integrity -exercises -robe -tap -ab -gazed -##tones -distracted -rulers -111 -favorable -jerome -tended -cart -factories -##eri -diplomat -valued -gravel -charitable -##try -calvin -exploring -chang -shepherd -terrace -pdf -pupil -##ural -reflects -ups -##rch -governors -shelf -depths -##nberg -trailed -crest -tackle -##nian -##ats -hatred -##kai -clare -makers -ethiopia -longtime -detected -embedded -lacking -slapped -rely -thomson -anticipation -iso -morton -successive -agnes -screenwriter -straightened -philippe -playwright -haunted -licence -iris -intentions -sutton -112 -logical -correctly -##weight -branded -licked -tipped -silva -ricky -narrator -requests -##ents -greeted -supernatural -cow -##wald -lung -refusing -employer -strait -gaelic -liner -##piece -zoe -sabha -##mba -driveway -harvest -prints -bates -reluctantly -threshold -algebra -ira -wherever -coupled -240 -assumption -picks -##air -designers -raids -gentlemen -##ean -roller -blowing -leipzig -locks -screw -dressing -strand -##lings -scar -dwarf -depicts -##nu -nods -##mine -differ -boris -##eur -yuan -flip -##gie -mob -invested -questioning -applying -##ture -shout -##sel -gameplay -blamed -illustrations -bothered -weakness -rehabilitation -##of -##zes -envelope -rumors -miners -leicester -subtle -kerry -##ico -ferguson -##fu -premiership -ne -##cat -bengali -prof -catches -remnants -dana -##rily -shouting -presidents -baltic -ought -ghosts -dances -sailors -shirley -fancy -dominic -##bie -madonna -##rick -bark -buttons -gymnasium -ashes -liver -toby -oath -providence -doyle -evangelical -nixon -cement -carnegie -embarked -hatch -surroundings -guarantee -needing -pirate -essence -##bee -filter -crane -hammond -projected -immune -percy -twelfth -##ult -regent -doctoral -damon -mikhail -##ichi -lu -critically -elect -realised -abortion -acute -screening 
-mythology -steadily -##fc -frown -nottingham -kirk -wa -minneapolis -##rra -module -algeria -mc -nautical -encounters -surprising -statues -availability -shirts -pie -alma -brows -munster -mack -soup -crater -tornado -sanskrit -cedar -explosive -bordered -dixon -planets -stamp -exam -happily -##bble -carriers -kidnapped -##vis -accommodation -emigrated -##met -knockout -correspondent -violation -profits -peaks -lang -specimen -agenda -ancestry -pottery -spelling -equations -obtaining -ki -linking -1825 -debris -asylum -##20 -buddhism -teddy -##ants -gazette -##nger -##sse -dental -eligibility -utc -fathers -averaged -zimbabwe -francesco -coloured -hissed -translator -lynch -mandate -humanities -mackenzie -uniforms -lin -##iana -##gio -asset -mhz -fitting -samantha -genera -wei -rim -beloved -shark -riot -entities -expressions -indo -carmen -slipping -owing -abbot -neighbor -sidney -##av -rats -recommendations -encouraging -squadrons -anticipated -commanders -conquered -##oto -donations -diagnosed -##mond -divide -##iva -guessed -decoration -vernon -auditorium -revelation -conversations -##kers -##power -herzegovina -dash -alike -protested -lateral -herman -accredited -mg -##gent -freeman -mel -fiji -crow -crimson -##rine -livestock -##pped -humanitarian -bored -oz -whip -##lene -##ali -legitimate -alter -grinning -spelled -anxious -oriental -wesley -##nin -##hole -carnival -controller -detect -##ssa -bowed -educator -kosovo -macedonia -##sin -occupy -mastering -stephanie -janeiro -para -unaware -nurses -noon -135 -cam -hopefully -ranger -combine -sociology -polar -rica -##eer -neill -##sman -holocaust -##ip -doubled -lust -1828 -109 -decent -cooling -unveiled -##card -1829 -nsw -homer -chapman -meyer -##gin -dive -mae -reagan -expertise -##gled -darwin -brooke -sided -prosecution -investigating -comprised -petroleum -genres -reluctant -differently -trilogy -johns -vegetables -corpse -highlighted -lounge -pension -unsuccessfully -elegant -aided -ivory -beatles -amelia -cain -dubai -sunny -immigrant -babe -click -##nder -underwater -pepper -combining -mumbled -atlas -horns -accessed -ballad -physicians -homeless -gestured -rpm -freak -louisville -corporations -patriots -prizes -rational -warn -modes -decorative -overnight -din -troubled -phantom -##ort -monarch -sheer -##dorf -generals -guidelines -organs -addresses -##zon -enhance -curling -parishes -cord -##kie -linux -caesar -deutsche -bavaria -##bia -coleman -cyclone -##eria -bacon -petty -##yama -##old -hampton -diagnosis -1824 -throws -complexity -rita -disputed -##₃ -pablo -##sch -marketed -trafficking -##ulus -examine -plague -formats -##oh -vault -faithful -##bourne -webster -##ox -highlights -##ient -##ann -phones -vacuum -sandwich -modeling -##gated -bolivia -clergy -qualities -isabel -##nas -##ars -wears -screams -reunited -annoyed -bra -##ancy -##rate -differential -transmitter -tattoo -container -poker -##och -excessive -resides -cowboys -##tum -augustus -trash -providers -statute -retreated -balcony -reversed -void -storey -preceded -masses -leap -laughs -neighborhoods -wards -schemes -falcon -santo -battlefield -pad -ronnie -thread -lesbian -venus -##dian -beg -sandstone -daylight -punched -gwen -analog -stroked -wwe -acceptable -measurements -dec -toxic -##kel -adequate -surgical -economist -parameters -varsity -##sberg -quantity -ella -##chy -##rton -countess -generating -precision -diamonds -expressway -ga -##ı -1821 -uruguay -talents -galleries -expenses -scanned -colleague -outlets -ryder -lucien -##ila -paramount 
-##bon -syracuse -dim -fangs -gown -sweep -##sie -toyota -missionaries -websites -##nsis -sentences -adviser -val -trademark -spells -##plane -patience -starter -slim -##borg -toe -incredibly -shoots -elliot -nobility -##wyn -cowboy -endorsed -gardner -tendency -persuaded -organisms -emissions -kazakhstan -amused -boring -chips -themed -##hand -llc -constantinople -chasing -systematic -guatemala -borrowed -erin -carey -##hard -highlands -struggles -1810 -##ifying -##ced -wong -exceptions -develops -enlarged -kindergarten -castro -##ern -##rina -leigh -zombie -juvenile -##most -consul -##nar -sailor -hyde -clarence -intensive -pinned -nasty -useless -jung -clayton -stuffed -exceptional -ix -apostolic -230 -transactions -##dge -exempt -swinging -cove -religions -##ash -shields -dairy -bypass -190 -pursuing -bug -joyce -bombay -chassis -southampton -chat -interact -redesignated -##pen -nascar -pray -salmon -rigid -regained -malaysian -grim -publicity -constituted -capturing -toilet -delegate -purely -tray -drift -loosely -striker -weakened -trinidad -mitch -itv -defines -transmitted -ming -scarlet -nodding -fitzgerald -fu -narrowly -sp -tooth -standings -virtue -##₁ -##wara -##cting -chateau -gloves -lid -##nel -hurting -conservatory -##pel -sinclair -reopened -sympathy -nigerian -strode -advocated -optional -chronic -discharge -##rc -suck -compatible -laurel -stella -shi -fails -wage -dodge -128 -informal -sorts -levi -buddha -villagers -##aka -chronicles -heavier -summoned -gateway -3000 -eleventh -jewelry -translations -accordingly -seas -##ency -fiber -pyramid -cubic -dragging -##ista -caring -##ops -android -contacted -lunar -##dt -kai -lisbon -patted -1826 -sacramento -theft -madagascar -subtropical -disputes -ta -holidays -piper -willow -mare -cane -itunes -newfoundland -benny -companions -dong -raj -observe -roar -charming -plaque -tibetan -fossils -enacted -manning -bubble -tina -tanzania -##eda -##hir -funk -swamp -deputies -cloak -ufc -scenario -par -scratch -metals -anthem -guru -engaging -specially -##boat -dialects -nineteen -cecil -duet -disability -messenger -unofficial -##lies -defunct -eds -moonlight -drainage -surname -puzzle -honda -switching -conservatives -mammals -knox -broadcaster -sidewalk -cope -##ried -benson -princes -peterson -##sal -bedford -sharks -eli -wreck -alberto -gasp -archaeology -lgbt -teaches -securities -madness -compromise -waving -coordination -davidson -visions -leased -possibilities -eighty -jun -fernandez -enthusiasm -assassin -sponsorship -reviewer -kingdoms -estonian -laboratories -##fy -##nal -applies -verb -celebrations -##zzo -rowing -lightweight -sadness -submit -mvp -balanced -dude -##vas -explicitly -metric -magnificent -mound -brett -mohammad -mistakes -irregular -##hing -##ass -sanders -betrayed -shipped -surge -##enburg -reporters -termed -georg -pity -verbal -bulls -abbreviated -enabling -appealed -##are -##atic -sicily -sting -heel -sweetheart -bart -spacecraft -brutal -monarchy -##tter -aberdeen -cameo -diane -##ub -survivor -clyde -##aries -complaint -##makers -clarinet -delicious -chilean -karnataka -coordinates -1818 -panties -##rst -pretending -ar -dramatically -kiev -bella -tends -distances -113 -catalog -launching -instances -telecommunications -portable -lindsay -vatican -##eim -angles -aliens -marker -stint -screens -bolton -##rne -judy -wool -benedict -plasma -europa -spark -imaging -filmmaker -swiftly -##een -contributor -##nor -opted -stamps -apologize -financing -butter -gideon -sophisticated -alignment -avery -chemicals 
-yearly -speculation -prominence -professionally -##ils -immortal -institutional -inception -wrists -identifying -tribunal -derives -gains -##wo -papal -preference -linguistic -vince -operative -brewery -##ont -unemployment -boyd -##ured -##outs -albeit -prophet -1813 -bi -##rr -##face -##rad -quarterly -asteroid -cleaned -radius -temper -##llen -telugu -jerk -viscount -menu -##ote -glimpse -##aya -yacht -hawaiian -baden -##rl -laptop -readily -##gu -monetary -offshore -scots -watches -##yang -##arian -upgrade -needle -xbox -lea -encyclopedia -flank -fingertips -##pus -delight -teachings -confirm -roth -beaches -midway -winters -##iah -teasing -daytime -beverly -gambling -bonnie -##backs -regulated -clement -hermann -tricks -knot -##shing -##uring -##vre -detached -ecological -owed -specialty -byron -inventor -bats -stays -screened -unesco -midland -trim -affection -##ander -##rry -jess -thoroughly -feedback -##uma -chennai -strained -heartbeat -wrapping -overtime -pleaded -##sworth -mon -leisure -oclc -##tate -##ele -feathers -angelo -thirds -nuts -surveys -clever -gill -commentator -##dos -darren -rides -gibraltar -##nc -##mu -dissolution -dedication -shin -meals -saddle -elvis -reds -chaired -taller -appreciation -functioning -niece -favored -advocacy -robbie -criminals -suffolk -yugoslav -passport -constable -congressman -hastings -vera -##rov -consecrated -sparks -ecclesiastical -confined -##ovich -muller -floyd -nora -1822 -paved -1827 -cumberland -ned -saga -spiral -##flow -appreciated -yi -collaborative -treating -similarities -feminine -finishes -##ib -jade -import -##nse -##hot -champagne -mice -securing -celebrities -helsinki -attributes -##gos -cousins -phases -ache -lucia -gandhi -submission -vicar -spear -shine -tasmania -biting -detention -constitute -tighter -seasonal -##gus -terrestrial -matthews -##oka -effectiveness -parody -philharmonic -##onic -1816 -strangers -encoded -consortium -guaranteed -regards -shifts -tortured -collision -supervisor -inform -broader -insight -theaters -armour -emeritus -blink -incorporates -mapping -##50 -##ein -handball -flexible -##nta -substantially -generous -thief -##own -carr -loses -1793 -prose -ucla -romeo -generic -metallic -realization -damages -mk -commissioners -zach -default -##ther -helicopters -lengthy -stems -spa -partnered -spectators -rogue -indication -penalties -teresa -1801 -sen -##tric -dalton -##wich -irving -photographic -##vey -dell -deaf -peters -excluded -unsure -##vable -patterson -crawled -##zio -resided -whipped -latvia -slower -ecole -pipes -employers -maharashtra -comparable -va -textile -pageant -##gel -alphabet -binary -irrigation -chartered -choked -antoine -offs -waking -supplement -##wen -quantities -demolition -regain -locate -urdu -folks -alt -114 -##mc -scary -andreas -whites -##ava -classrooms -mw -aesthetic -publishes -valleys -guides -cubs -johannes -bryant -conventions -affecting -##itt -drain -awesome -isolation -prosecutor -ambitious -apology -captive -downs -atmospheric -lorenzo -aisle -beef -foul -##onia -kidding -composite -disturbed -illusion -natives -##ffer -emi -rockets -riverside -wartime -painters -adolf -melted -##ail -uncertainty -simulation -hawks -progressed -meantime -builder -spray -breach -unhappy -regina -russians -##urg -determining -##tation -tram -1806 -##quin -aging -##12 -1823 -garion -rented -mister -diaz -terminated -clip -1817 -depend -nervously -disco -owe -defenders -shiva -notorious -disbelief -shiny -worcester -##gation -##yr -trailing -undertook -islander -belarus 
-limitations -watershed -fuller -overlooking -utilized -raphael -1819 -synthetic -breakdown -klein -##nate -moaned -memoir -lamb -practicing -##erly -cellular -arrows -exotic -##graphy -witches -117 -charted -rey -hut -hierarchy -subdivision -freshwater -giuseppe -aloud -reyes -qatar -marty -sideways -utterly -sexually -jude -prayers -mccarthy -softball -blend -damien -##gging -##metric -wholly -erupted -lebanese -negro -revenues -tasted -comparative -teamed -transaction -labeled -maori -sovereignty -parkway -trauma -gran -malay -121 -advancement -descendant -2020 -buzz -salvation -inventory -symbolic -##making -antarctica -mps -##gas -##bro -mohammed -myanmar -holt -submarines -tones -##lman -locker -patriarch -bangkok -emerson -remarks -predators -kin -afghan -confession -norwich -rental -emerge -advantages -##zel -rca -##hold -shortened -storms -aidan -##matic -autonomy -compliance -##quet -dudley -atp -##osis -1803 -motto -documentation -summary -professors -spectacular -christina -archdiocese -flashing -innocence -remake -##dell -psychic -reef -scare -employ -rs -sticks -meg -gus -leans -##ude -accompany -bergen -tomas -##iko -doom -wages -pools -##nch -##bes -breasts -scholarly -alison -outline -brittany -breakthrough -willis -realistic -##cut -##boro -competitor -##stan -pike -picnic -icon -designing -commercials -washing -villain -skiing -micro -costumes -auburn -halted -executives -##hat -logistics -cycles -vowel -applicable -barrett -exclaimed -eurovision -eternity -ramon -##umi -##lls -modifications -sweeping -disgust -##uck -torch -aviv -ensuring -rude -dusty -sonic -donovan -outskirts -cu -pathway -##band -##gun -##lines -disciplines -acids -cadet -paired -##40 -sketches -##sive -marriages -##⁺ -folding -peers -slovak -implies -admired -##beck -1880s -leopold -instinct -attained -weston -megan -horace -##ination -dorsal -ingredients -evolutionary -##its -complications -deity -lethal -brushing -levy -deserted -institutes -posthumously -delivering -telescope -coronation -motivated -rapids -luc -flicked -pays -volcano -tanner -weighed -##nica -crowds -frankie -gifted -addressing -granddaughter -winding -##rna -constantine -gomez -##front -landscapes -rudolf -anthropology -slate -werewolf -##lio -astronomy -circa -rouge -dreaming -sack -knelt -drowned -naomi -prolific -tracked -freezing -herb -##dium -agony -randall -twisting -wendy -deposit -touches -vein -wheeler -##bbled -##bor -batted -retaining -tire -presently -compare -specification -daemon -nigel -##grave -merry -recommendation -czechoslovakia -sandra -ng -roma -##sts -lambert -inheritance -sheikh -winchester -cries -examining -##yle -comeback -cuisine -nave -##iv -ko -retrieve -tomatoes -barker -polished -defining -irene -lantern -personalities -begging -tract -swore -1809 -175 -##gic -omaha -brotherhood -##rley -haiti -##ots -exeter -##ete -##zia -steele -dumb -pearson -210 -surveyed -elisabeth -trends -##ef -fritz -##rf -premium -bugs -fraction -calmly -viking -##birds -tug -inserted -unusually -##ield -confronted -distress -crashing -brent -turks -resign -##olo -cambodia -gabe -sauce -##kal -evelyn -116 -extant -clusters -quarry -teenagers -luna -##lers -##ister -affiliation -drill -##ashi -panthers -scenic -libya -anita -strengthen -inscriptions -##cated -lace -sued -judith -riots -##uted -mint -##eta -preparations -midst -dub -challenger -##vich -mock -cf -displaced -wicket -breaths -enables -schmidt -analyst -##lum -ag -highlight -automotive -axe -josef -newark -sufficiently -resembles -50th -##pal -flushed -mum 
-traits -##ante -commodore -incomplete -warming -titular -ceremonial -ethical -118 -celebrating -eighteenth -cao -lima -medalist -mobility -strips -snakes -##city -miniature -zagreb -barton -escapes -umbrella -automated -doubted -differs -cooled -georgetown -dresden -cooked -fade -wyatt -rna -jacobs -carlton -abundant -stereo -boost -madras -inning -##hia -spur -ip -malayalam -begged -osaka -groan -escaping -charging -dose -vista -##aj -bud -papa -communists -advocates -edged -tri -##cent -resemble -peaking -necklace -fried -montenegro -saxony -goose -glances -stuttgart -curator -recruit -grocery -sympathetic -##tting -##fort -127 -lotus -randolph -ancestor -##rand -succeeding -jupiter -1798 -macedonian -##heads -hiking -1808 -handing -fischer -##itive -garbage -node -##pies -prone -singular -papua -inclined -attractions -italia -pouring -motioned -grandma -garnered -jacksonville -corp -ego -ringing -aluminum -##hausen -ordering -##foot -drawer -traders -synagogue -##play -##kawa -resistant -wandering -fragile -fiona -teased -var -hardcore -soaked -jubilee -decisive -exposition -mercer -poster -valencia -hale -kuwait -1811 -##ises -##wr -##eed -tavern -gamma -122 -johan -##uer -airways -amino -gil -##ury -vocational -domains -torres -##sp -generator -folklore -outcomes -##keeper -canberra -shooter -fl -beams -confrontation -##lling -##gram -feb -aligned -forestry -pipeline -jax -motorway -conception -decay -##tos -coffin -##cott -stalin -1805 -escorted -minded -##nam -sitcom -purchasing -twilight -veronica -additions -passive -tensions -straw -123 -frequencies -1804 -refugee -cultivation -##iate -christie -clary -bulletin -crept -disposal -##rich -##zong -processor -crescent -##rol -bmw -emphasized -whale -nazis -aurora -##eng -dwelling -hauled -sponsors -toledo -mega -ideology -theatres -tessa -cerambycidae -saves -turtle -cone -suspects -kara -rusty -yelling -greeks -mozart -shades -cocked -participant -##tro -shire -spit -freeze -necessity -##cos -inmates -nielsen -councillors -loaned -uncommon -omar -peasants -botanical -offspring -daniels -formations -jokes -1794 -pioneers -sigma -licensing -##sus -wheelchair -polite -1807 -liquor -pratt -trustee -##uta -forewings -balloon -##zz -kilometre -camping -explicit -casually -shawn -foolish -teammates -nm -hassan -carrie -judged -satisfy -vanessa -knives -selective -cnn -flowed -##lice -eclipse -stressed -eliza -mathematician -cease -cultivated -##roy -commissions -browns -##ania -destroyers -sheridan -meadow -##rius -minerals -##cial -downstream -clash -gram -memoirs -ventures -baha -seymour -archie -midlands -edith -fare -flynn -invite -canceled -tiles -stabbed -boulder -incorporate -amended -camden -facial -mollusk -unreleased -descriptions -yoga -grabs -550 -raises -ramp -shiver -##rose -coined -pioneering -tunes -qing -warwick -tops -119 -melanie -giles -##rous -wandered -##inal -annexed -nov -30th -unnamed -##ished -organizational -airplane -normandy -stoke -whistle -blessing -violations -chased -holders -shotgun -##ctic -outlet -reactor -##vik -tires -tearing -shores -fortified -mascot -constituencies -nc -columnist -productive -tibet -##rta -lineage -hooked -oct -tapes -judging -cody -##gger -hansen -kashmir -triggered -##eva -solved -cliffs -##tree -resisted -anatomy -protesters -transparent -implied -##iga -injection -mattress -excluding -##mbo -defenses -helpless -devotion -##elli -growl -liberals -weber -phenomena -atoms -plug -##iff -mortality -apprentice -howe -convincing -aaa -swimmer -barber -leone -promptly -sodium -def 
-nowadays -arise -##oning -gloucester -corrected -dignity -norm -erie -##ders -elders -evacuated -sylvia -compression -##yar -hartford -pose -backpack -reasoning -accepts -24th -wipe -millimetres -marcel -##oda -dodgers -albion -1790 -overwhelmed -aerospace -oaks -1795 -showcase -acknowledge -recovering -nolan -ashe -hurts -geology -fashioned -disappearance -farewell -swollen -shrug -marquis -wimbledon -124 -rue -1792 -commemorate -reduces -experiencing -inevitable -calcutta -intel -##court -murderer -sticking -fisheries -imagery -bloom -280 -brake -##inus -gustav -hesitation -memorable -po -viral -beans -accidents -tunisia -antenna -spilled -consort -treatments -aye -perimeter -##gard -donation -hostage -migrated -banker -addiction -apex -lil -trout -##ously -conscience -##nova -rams -sands -genome -passionate -troubles -##lets -##set -amid -##ibility -##ret -higgins -exceed -vikings -##vie -payne -##zan -muscular -##ste -defendant -sucking -##wal -ibrahim -fuselage -claudia -vfl -europeans -snails -interval -##garh -preparatory -statewide -tasked -lacrosse -viktor -##lation -angola -##hra -flint -implications -employs -teens -patrons -stall -weekends -barriers -scrambled -nucleus -tehran -jenna -parsons -lifelong -robots -displacement -5000 -##bles -precipitation -##gt -knuckles -clutched -1802 -marrying -ecology -marx -accusations -declare -scars -kolkata -mat -meadows -bermuda -skeleton -finalists -vintage -crawl -coordinate -affects -subjected -orchestral -mistaken -##tc -mirrors -dipped -relied -260 -arches -candle -##nick -incorporating -wildly -fond -basilica -owl -fringe -rituals -whispering -stirred -feud -tertiary -slick -goat -honorable -whereby -skip -ricardo -stripes -parachute -adjoining -submerged -synthesizer -##gren -intend -positively -ninety -phi -beaver -partition -fellows -alexis -prohibition -carlisle -bizarre -fraternity -##bre -doubts -icy -cbc -aquatic -sneak -sonny -combines -airports -crude -supervised -spatial -merge -alfonso -##bic -corrupt -scan -undergo -##ams -disabilities -colombian -comparing -dolphins -perkins -##lish -reprinted -unanimous -bounced -hairs -underworld -midwest -semester -bucket -paperback -miniseries -coventry -demise -##leigh -demonstrations -sensor -rotating -yan -##hler -arrange -soils -##idge -hyderabad -labs -##dr -brakes -grandchildren -##nde -negotiated -rover -ferrari -continuation -directorate -augusta -stevenson -counterpart -gore -##rda -nursery -rican -ave -collectively -broadly -pastoral -repertoire -asserted -discovering -nordic -styled -fiba -cunningham -harley -middlesex -survives -tumor -tempo -zack -aiming -lok -urgent -##rade -##nto -devils -##ement -contractor -turin -##wl -##ool -bliss -repaired -simmons -moan -astronomical -cr -negotiate -lyric -1890s -lara -bred -clad -angus -pbs -##ience -engineered -posed -##lk -hernandez -possessions -elbows -psychiatric -strokes -confluence -electorate -lifts -campuses -lava -alps -##ep -##ution -##date -physicist -woody -##page -##ographic -##itis -juliet -reformation -sparhawk -320 -complement -suppressed -jewel -##½ -floated -##kas -continuity -sadly -##ische -inability -melting -scanning -paula -flour -judaism -safer -vague -##lm -solving -curb -##stown -financially -gable -bees -expired -miserable -cassidy -dominion -1789 -cupped -145 -robbery -facto -amos -warden -resume -tallest -marvin -ing -pounded -usd -declaring -gasoline -##aux -darkened -270 -650 -sophomore -##mere -erection -gossip -televised -risen -dial -##eu -pillars -##link -passages -profound -##tina -arabian 
-ashton -silicon -nail -##ead -##lated -##wer -##hardt -fleming -firearms -ducked -circuits -blows -waterloo -titans -##lina -atom -fireplace -cheshire -financed -activation -algorithms -##zzi -constituent -catcher -cherokee -partnerships -sexuality -platoon -tragic -vivian -guarded -whiskey -meditation -poetic -##late -##nga -##ake -porto -listeners -dominance -kendra -mona -chandler -factions -22nd -salisbury -attitudes -derivative -##ido -##haus -intake -paced -javier -illustrator -barrels -bias -cockpit -burnett -dreamed -ensuing -##anda -receptors -someday -hawkins -mattered -##lal -slavic -1799 -jesuit -cameroon -wasted -tai -wax -lowering -victorious -freaking -outright -hancock -librarian -sensing -bald -calcium -myers -tablet -announcing -barack -shipyard -pharmaceutical -##uan -greenwich -flush -medley -patches -wolfgang -pt -speeches -acquiring -exams -nikolai -##gg -hayden -kannada -##type -reilly -##pt -waitress -abdomen -devastated -capped -pseudonym -pharmacy -fulfill -paraguay -1796 -clicked -##trom -archipelago -syndicated -##hman -lumber -orgasm -rejection -clifford -lorraine -advent -mafia -rodney -brock -##ght -##used -##elia -cassette -chamberlain -despair -mongolia -sensors -developmental -upstream -##eg -##alis -spanning -165 -trombone -basque -seeded -interred -renewable -rhys -leapt -revision -molecule -##ages -chord -vicious -nord -shivered -23rd -arlington -debts -corpus -sunrise -bays -blackburn -centimetres -##uded -shuddered -gm -strangely -gripping -cartoons -isabelle -orbital -##ppa -seals -proving -##lton -refusal -strengthened -bust -assisting -baghdad -batsman -portrayal -mara -pushes -spears -og -##cock -reside -nathaniel -brennan -1776 -confirmation -caucus -##worthy -markings -yemen -nobles -ku -lazy -viewer -catalan -encompasses -sawyer -##fall -sparked -substances -patents -braves -arranger -evacuation -sergio -persuade -dover -tolerance -penguin -cum -jockey -insufficient -townships -occupying -declining -plural -processed -projection -puppet -flanders -introduces -liability -##yon -gymnastics -antwerp -taipei -hobart -candles -jeep -wes -observers -126 -chaplain -bundle -glorious -##hine -hazel -flung -sol -excavations -dumped -stares -sh -bangalore -triangular -icelandic -intervals -expressing -turbine -##vers -songwriting -crafts -##igo -jasmine -ditch -rite -##ways -entertaining -comply -sorrow -wrestlers -basel -emirates -marian -rivera -helpful -##some -caution -downward -networking -##atory -##tered -darted -genocide -emergence -replies -specializing -spokesman -convenient -unlocked -fading -augustine -concentrations -resemblance -elijah -investigator -andhra -##uda -promotes -bean -##rrell -fleeing -wan -simone -announcer -##ame -##bby -lydia -weaver -132 -residency -modification -##fest -stretches -##ast -alternatively -nat -lowe -lacks -##ented -pam -tile -concealed -inferior -abdullah -residences -tissues -vengeance -##ided -moisture -peculiar -groove -zip -bologna -jennings -ninja -oversaw -zombies -pumping -batch -livingston -emerald -installations -1797 -peel -nitrogen -rama -##fying -##star -schooling -strands -responding -werner -##ost -lime -casa -accurately -targeting -##rod -underway -##uru -hemisphere -lester -##yard -occupies -2d -griffith -angrily -reorganized -##owing -courtney -deposited -##dd -##30 -estadio -##ifies -dunn -exiled -##ying -checks -##combe -##о -##fly -successes -unexpectedly -blu -assessed -##flower -##ه -observing -sacked -spiders -kn -##tail -mu -nodes -prosperity -audrey -divisional -155 -broncos -tangled 
-adjust -feeds -erosion -paolo -surf -directory -snatched -humid -admiralty -screwed -gt -reddish -##nese -modules -trench -lamps -bind -leah -bucks -competes -##nz -##form -transcription -##uc -isles -violently -clutching -pga -cyclist -inflation -flats -ragged -unnecessary -##hian -stubborn -coordinated -harriet -baba -disqualified -330 -insect -wolfe -##fies -reinforcements -rocked -duel -winked -embraced -bricks -##raj -hiatus -defeats -pending -brightly -jealousy -##xton -##hm -##uki -lena -gdp -colorful -##dley -stein -kidney -##shu -underwear -wanderers -##haw -##icus -guardians -m³ -roared -habits -##wise -permits -gp -uranium -punished -disguise -bundesliga -elise -dundee -erotic -partisan -pi -collectors -float -individually -rendering -behavioral -bucharest -ser -hare -valerie -corporal -nutrition -proportional -##isa -immense -##kis -pavement -##zie -##eld -sutherland -crouched -1775 -##lp -suzuki -trades -endurance -operas -crosby -prayed -priory -rory -socially -##urn -gujarat -##pu -walton -cube -pasha -privilege -lennon -floods -thorne -waterfall -nipple -scouting -approve -##lov -minorities -voter -dwight -extensions -assure -ballroom -slap -dripping -privileges -rejoined -confessed -demonstrating -patriotic -yell -investor -##uth -pagan -slumped -squares -##cle -##kins -confront -bert -embarrassment -##aid -aston -urging -sweater -starr -yuri -brains -williamson -commuter -mortar -structured -selfish -exports -##jon -cds -##him -unfinished -##rre -mortgage -destinations -##nagar -canoe -solitary -buchanan -delays -magistrate -fk -##pling -motivation -##lier -##vier -recruiting -assess -##mouth -malik -antique -1791 -pius -rahman -reich -tub -zhou -smashed -airs -galway -xii -conditioning -honduras -discharged -dexter -##pf -lionel -129 -debates -lemon -tiffany -volunteered -dom -dioxide -procession -devi -sic -tremendous -advertisements -colts -transferring -verdict -hanover -decommissioned -utter -relate -pac -racism -##top -beacon -limp -similarity -terra -occurrence -ant -##how -becky -capt -updates -armament -richie -pal -##graph -halloween -mayo -##ssen -##bone -cara -serena -fcc -dolls -obligations -##dling -violated -lafayette -jakarta -exploitation -##ime -infamous -iconic -##lah -##park -kitty -moody -reginald -dread -spill -crystals -olivier -modeled -bluff -equilibrium -separating -notices -ordnance -extinction -onset -cosmic -attachment -sammy -expose -privy -anchored -##bil -abbott -admits -bending -baritone -emmanuel -policeman -vaughan -winged -climax -dresses -denny -polytechnic -mohamed -burmese -authentic -nikki -genetics -grandparents -homestead -gaza -postponed -metacritic -una -##sby -##bat -unstable -dissertation -##rial -##cian -curls -obscure -uncovered -bronx -praying -disappearing -##hoe -prehistoric -coke -turret -mutations -nonprofit -pits -monaco -##ي -##usion -prominently -dispatched -podium -##mir -uci -##uation -133 -fortifications -birthplace -kendall -##lby -##oll -preacher -rack -goodman -##rman -persistent -##ott -countless -jaime -recorder -lexington -persecution -jumps -renewal -wagons -##11 -crushing -##holder -decorations -##lake -abundance -wrath -laundry -£1 -garde -##rp -jeanne -beetles -peasant -##sl -splitting -caste -sergei -##rer -##ema -scripts -##ively -rub -satellites -##vor -inscribed -verlag -scrapped -gale -packages -chick -potato -slogan -kathleen -arabs -##culture -counterparts -reminiscent -choral -##tead -rand -retains -bushes -dane -accomplish -courtesy -closes -##oth -slaughter -hague -krakow -lawson -tailed 
[... deleted WordPiece vocabulary entries, one token per removed diff line, from "-elias" through "-sakura" (subword pieces marked with "##"); full token list omitted ...]
-##alo -uv -sentimental -##skaya -midfield -##eses -sturdy -scrolls -macleod -##kyu -entropy -##lance -mitochondrial -cicero -excelled -thinner -convoys -perceive -##oslav -##urable -systematically -grind -burkina -287 -##tagram -ops -##aman -guantanamo -##cloth -##tite -forcefully -wavy -##jou -pointless -##linger -##tze -layton -portico -superficial -clerical -outlaws -##hism -burials -muir -##inn -creditors -hauling -rattle -##leg -calais -monde -archers -reclaimed -dwell -wexford -hellenic -falsely -remorse -##tek -dough -furnishings -##uttered -gabon -neurological -novice -##igraphy -contemplated -pulpit -nightstand -saratoga -##istan -documenting -pulsing -taluk -##firmed -busted -marital -##rien -disagreements -wasps -##yes -hodge -mcdonnell -mimic -fran -pendant -dhabi -musa -##nington -congratulations -argent -darrell -concussion -losers -regrets -thessaloniki -reversal -donaldson -hardwood -thence -achilles -ritter -##eran -demonic -jurgen -prophets -goethe -eki -classmate -buff -##cking -yank -irrational -##inging -perished -seductive -qur -sourced -##crat -##typic -mustard -ravine -barre -horizontally -characterization -phylogenetic -boise -##dit -##runner -##tower -brutally -intercourse -seduce -##bbing -fay -ferris -ogden -amar -nik -unarmed -##inator -evaluating -kyrgyzstan -sweetness -##lford -##oki -mccormick -meiji -notoriety -stimulate -disrupt -figuring -instructional -mcgrath -##zoo -groundbreaking -##lto -flinch -khorasan -agrarian -bengals -mixer -radiating -##sov -ingram -pitchers -nad -tariff -##cript -tata -##codes -##emi -##ungen -appellate -lehigh -##bled -##giri -brawl -duct -texans -##ciation -##ropolis -skipper -speculative -vomit -doctrines -stresses -253 -davy -graders -whitehead -jozef -timely -cumulative -haryana -paints -appropriately -boon -cactus -##ales -##pid -dow -legions -##pit -perceptions -1730 -picturesque -##yse -periphery -rune -wr -##aha -celtics -sentencing -whoa -##erin -confirms -variance -425 -moines -mathews -spade -rave -m1 -fronted -fx -blending -alleging -reared -##gl -237 -##paper -grassroots -eroded -##free -##physical -directs -ordeal -##sław -accelerate -hacker -rooftop -##inia -lev -buys -cebu -devote -##lce -specialising -##ulsion -choreographed -repetition -warehouses -##ryl -paisley -tuscany -analogy -sorcerer -hash -huts -shards -descends -exclude -nix -chaplin -gaga -ito -vane -##drich -causeway -misconduct -limo -orchestrated -glands -jana -##kot -u2 -##mple -##sons -branching -contrasts -scoop -longed -##virus -chattanooga -##75 -syrup -cornerstone -##tized -##mind -##iaceae -careless -precedence -frescoes -##uet -chilled -consult -modelled -snatch -peat -##thermal -caucasian -humane -relaxation -spins -temperance -##lbert -occupations -lambda -hybrids -moons -mp3 -##oese -247 -rolf -societal -yerevan -ness -##ssler -befriended -mechanized -nominate -trough -boasted -cues -seater -##hom -bends -##tangle -conductors -emptiness -##lmer -eurasian -adriatic -tian -##cie -anxiously -lark -propellers -chichester -jock -ev -2a -##holding -credible -recounts -tori -loyalist -abduction -##hoot -##redo -nepali -##mite -ventral -tempting -##ango -##crats -steered -##wice -javelin -dipping -laborers -prentice -looming -titanium -##ː -badges -emir -tensor -##ntation -egyptians -rash -denies -hawthorne -lombard -showers -wehrmacht -dietary -trojan -##reus -welles -executing -horseshoe -lifeboat -##lak -elsa -infirmary -nearing -roberta -boyer -mutter -trillion -joanne -##fine -##oked -sinks -vortex -uruguayan -clasp -sirius -##block 
-accelerator -prohibit -sunken -byu -chronological -diplomats -ochreous -510 -symmetrical -1644 -maia -##tology -salts -reigns -atrocities -##ия -hess -bared -issn -##vyn -cater -saturated -##cycle -##isse -sable -voyager -dyer -yusuf -##inge -fountains -wolff -##39 -##nni -engraving -rollins -atheist -ominous -##ault -herr -chariot -martina -strung -##fell -##farlane -horrific -sahib -gazes -saetan -erased -ptolemy -##olic -flushing -lauderdale -analytic -##ices -530 -navarro -beak -gorilla -herrera -broom -guadalupe -raiding -sykes -311 -bsc -deliveries -1720 -invasions -carmichael -tajikistan -thematic -ecumenical -sentiments -onstage -##rians -##brand -##sume -catastrophic -flanks -molten -##arns -waller -aimee -terminating -##icing -alternately -##oche -nehru -printers -outraged -##eving -empires -template -banners -repetitive -za -##oise -vegetarian -##tell -guiana -opt -cavendish -lucknow -synthesized -##hani -##mada -finalized -##ctable -fictitious -mayoral -unreliable -##enham -embracing -peppers -rbis -##chio -##neo -inhibition -slashed -togo -orderly -embroidered -safari -salty -236 -barron -benito -totaled -##dak -pubs -simulated -caden -devin -tolkien -momma -welding -sesame -##ept -gottingen -hardness -630 -shaman -temeraire -620 -adequately -pediatric -##kit -ck -assertion -radicals -composure -cadence -seafood -beaufort -lazarus -mani -warily -cunning -kurdistan -249 -cantata -##kir -ares -##41 -##clusive -nape -townland -geared -insulted -flutter -boating -violate -draper -dumping -malmo -##hh -##romatic -firearm -alta -bono -obscured -##clave -exceeds -panorama -unbelievable -##train -preschool -##essed -disconnected -installing -rescuing -secretaries -accessibility -##castle -##drive -##ifice -##film -bouts -slug -waterway -mindanao -##buro -##ratic -halves -##ل -calming -liter -maternity -adorable -bragg -electrification -mcc -##dote -roxy -schizophrenia -##body -munoz -kaye -whaling -239 -mil -tingling -tolerant -##ago -unconventional -volcanoes -##finder -deportivo -##llie -robson -kaufman -neuroscience -wai -deportation -masovian -scraping -converse -##bh -hacking -bulge -##oun -administratively -yao -580 -amp -mammoth -booster -claremont -hooper -nomenclature -pursuits -mclaughlin -melinda -##sul -catfish -barclay -substrates -taxa -zee -originals -kimberly -packets -padma -##ality -borrowing -ostensibly -solvent -##bri -##genesis -##mist -lukas -shreveport -veracruz -##ь -##lou -##wives -cheney -tt -anatolia -hobbs -##zyn -cyclic -radiant -alistair -greenish -siena -dat -independents -##bation -conform -pieter -hyper -applicant -bradshaw -spores -telangana -vinci -inexpensive -nuclei -322 -jang -nme -soho -spd -##ign -cradled -receptionist -pow -##43 -##rika -fascism -##ifer -experimenting -##ading -##iec -##region -345 -jocelyn -maris -stair -nocturnal -toro -constabulary -elgin -##kker -msc -##giving -##schen -##rase -doherty -doping -sarcastically -batter -maneuvers -##cano -##apple -##gai -##git -intrinsic -##nst -##stor -1753 -showtime -cafes -gasps -lviv -ushered -##thed -fours -restart -astonishment -transmitting -flyer -shrugs -##sau -intriguing -cones -dictated -mushrooms -medial -##kovsky -##elman -escorting -gaped -##26 -godfather -##door -##sell -djs -recaptured -timetable -vila -1710 -3a -aerodrome -mortals -scientology -##orne -angelina -mag -convection -unpaid -insertion -intermittent -lego -##nated -endeavor -kota -pereira -##lz -304 -bwv -glamorgan -insults -agatha -fey -##cend -fleetwood -mahogany -protruding -steamship -zeta -##arty -mcguire 
-suspense -##sphere -advising -urges -##wala -hurriedly -meteor -gilded -inline -arroyo -stalker -##oge -excitedly -revered -##cure -earle -introductory -##break -##ilde -mutants -puff -pulses -reinforcement -##haling -curses -lizards -stalk -correlated -##fixed -fallout -macquarie -##unas -bearded -denton -heaving -802 -##ocation -winery -assign -dortmund -##lkirk -everest -invariant -charismatic -susie -##elling -bled -lesley -telegram -sumner -bk -##ogen -##к -wilcox -needy -colbert -duval -##iferous -##mbled -allotted -attends -imperative -##hita -replacements -hawker -##inda -insurgency -##zee -##eke -casts -##yla -680 -ives -transitioned -##pack -##powering -authoritative -baylor -flex -cringed -plaintiffs -woodrow -##skie -drastic -ape -aroma -unfolded -commotion -nt -preoccupied -theta -routines -lasers -privatization -wand -domino -ek -clenching -nsa -strategically -showered -bile -handkerchief -pere -storing -christophe -insulting -316 -nakamura -romani -asiatic -magdalena -palma -cruises -stripping -405 -konstantin -soaring -##berman -colloquially -forerunner -havilland -incarcerated -parasites -sincerity -##utus -disks -plank -saigon -##ining -corbin -homo -ornaments -powerhouse -##tlement -chong -fastened -feasibility -idf -morphological -usable -##nish -##zuki -aqueduct -jaguars -keepers -##flies -aleksandr -faust -assigns -ewing -bacterium -hurled -tricky -hungarians -integers -wallis -321 -yamaha -##isha -hushed -oblivion -aviator -evangelist -friars -##eller -monograph -ode -##nary -airplanes -labourers -charms -##nee -1661 -hagen -tnt -rudder -fiesta -transcript -dorothea -ska -inhibitor -maccabi -retorted -raining -encompassed -clauses -menacing -1642 -lineman -##gist -vamps -##ape -##dick -gloom -##rera -dealings -easing -seekers -##nut -##pment -helens -unmanned -##anu -##isson -basics -##amy -##ckman -adjustments -1688 -brutality -horne -##zell -sui -##55 -##mable -aggregator -##thal -rhino -##drick -##vira -counters -zoom -##01 -##rting -mn -montenegrin -packard -##unciation -##♭ -##kki -reclaim -scholastic -thugs -pulsed -##icia -syriac -quan -saddam -banda -kobe -blaming -buddies -dissent -##lusion -##usia -corbett -jaya -delle -erratic -lexie -##hesis -435 -amiga -hermes -##pressing -##leen -chapels -gospels -jamal -##uating -compute -revolving -warp -##sso -##thes -armory -##eras -##gol -antrim -loki -##kow -##asian -##good -##zano -braid -handwriting -subdistrict -funky -pantheon -##iculate -concurrency -estimation -improper -juliana -##his -newcomers -johnstone -staten -communicated -##oco -##alle -sausage -stormy -##stered -##tters -superfamily -##grade -acidic -collateral -tabloid -##oped -##rza -bladder -austen -##ellant -mcgraw -##hay -hannibal -mein -aquino -lucifer -wo -badger -boar -cher -christensen -greenberg -interruption -##kken -jem -244 -mocked -bottoms -cambridgeshire -##lide -sprawling -##bbly -eastwood -ghent -synth -##buck -advisers -##bah -nominally -hapoel -qu -daggers -estranged -fabricated -towels -vinnie -wcw -misunderstanding -anglia -nothin -unmistakable -##dust -##lova -chilly -marquette -truss -##edge -##erine -reece -##lty -##chemist -##connected -272 -308 -41st -bash -raion -waterfalls -##ump -##main -labyrinth -queue -theorist -##istle -bharatiya -flexed -soundtracks -rooney -leftist -patrolling -wharton -plainly -alleviate -eastman -schuster -topographic -engages -immensely -unbearable -fairchild -1620 -dona -lurking -parisian -oliveira -ia -indictment -hahn -bangladeshi -##aster -vivo -##uming -##ential -antonia -expects -indoors 
-kildare -harlan -##logue -##ogenic -##sities -forgiven -##wat -childish -tavi -##mide -##orra -plausible -grimm -successively -scooted -##bola -##dget -##rith -spartans -emery -flatly -azure -epilogue -##wark -flourish -##iny -##tracted -##overs -##oshi -bestseller -distressed -receipt -spitting -hermit -topological -##cot -drilled -subunit -francs -##layer -eel -##fk -##itas -octopus -footprint -petitions -ufo -##say -##foil -interfering -leaking -palo -##metry -thistle -valiant -##pic -narayan -mcpherson -##fast -gonzales -##ym -##enne -dustin -novgorod -solos -##zman -doin -##raph -##patient -##meyer -soluble -ashland -cuffs -carole -pendleton -whistling -vassal -##river -deviation -revisited -constituents -rallied -rotate -loomed -##eil -##nting -amateurs -augsburg -auschwitz -crowns -skeletons -##cona -bonnet -257 -dummy -globalization -simeon -sleeper -mandal -differentiated -##crow -##mare -milne -bundled -exasperated -talmud -owes -segregated -##feng -##uary -dentist -piracy -props -##rang -devlin -##torium -malicious -paws -##laid -dependency -##ergy -##fers -##enna -258 -pistons -rourke -jed -grammatical -tres -maha -wig -512 -ghostly -jayne -##achal -##creen -##ilis -##lins -##rence -designate -##with -arrogance -cambodian -clones -showdown -throttle -twain -##ception -lobes -metz -nagoya -335 -braking -##furt -385 -roaming -##minster -amin -crippled -##37 -##llary -indifferent -hoffmann -idols -intimidating -1751 -261 -influenza -memo -onions -1748 -bandage -consciously -##landa -##rage -clandestine -observes -swiped -tangle -##ener -##jected -##trum -##bill -##lta -hugs -congresses -josiah -spirited -##dek -humanist -managerial -filmmaking -inmate -rhymes -debuting -grimsby -ur -##laze -duplicate -vigor -##tf -republished -bolshevik -refurbishment -antibiotics -martini -methane -newscasts -royale -horizons -levant -iain -visas -##ischen -paler -##around -manifestation -snuck -alf -chop -futile -pedestal -rehab -##kat -bmg -kerman -res -fairbanks -jarrett -abstraction -saharan -##zek -1746 -procedural -clearer -kincaid -sash -luciano -##ffey -crunch -helmut -##vara -revolutionaries -##tute -creamy -leach -##mmon -1747 -permitting -nes -plight -wendell -##lese -contra -ts -clancy -ipa -mach -staples -autopsy -disturbances -nueva -karin -pontiac -##uding -proxy -venerable -haunt -leto -bergman -expands -##helm -wal -##pipe -canning -celine -cords -obesity -##enary -intrusion -planner -##phate -reasoned -sequencing -307 -harrow -##chon -##dora -marred -mcintyre -repay -tarzan -darting -248 -harrisburg -margarita -repulsed -##hur -##lding -belinda -hamburger -novo -compliant -runways -bingham -registrar -skyscraper -ic -cuthbert -improvisation -livelihood -##corp -##elial -admiring -##dened -sporadic -believer -casablanca -popcorn -##29 -asha -shovel -##bek -##dice -coiled -tangible -##dez -casper -elsie -resin -tenderness -rectory -##ivision -avail -sonar -##mori -boutique -##dier -guerre -bathed -upbringing -vaulted -sandals -blessings -##naut -##utnant -1680 -306 -foxes -pia -corrosion -hesitantly -confederates -crystalline -footprints -shapiro -tirana -valentin -drones -45th -microscope -shipments -texted -inquisition -wry -guernsey -unauthorized -resigning -760 -ripple -schubert -stu -reassure -felony -##ardo -brittle -koreans -##havan -##ives -dun -implicit -tyres -##aldi -##lth -magnolia -##ehan -##puri -##poulos -aggressively -fei -gr -familiarity -##poo -indicative -##trust -fundamentally -jimmie -overrun -395 -anchors -moans -##opus -britannia -armagh -##ggle -purposely 
-seizing -##vao -bewildered -mundane -avoidance -cosmopolitan -geometridae -quartermaster -caf -415 -chatter -engulfed -gleam -purge -##icate -juliette -jurisprudence -guerra -revisions -##bn -casimir -brew -##jm -1749 -clapton -cloudy -conde -hermitage -278 -simulations -torches -vincenzo -matteo -##rill -hidalgo -booming -westbound -accomplishment -tentacles -unaffected -##sius -annabelle -flopped -sloping -##litz -dreamer -interceptor -vu -##loh -consecration -copying -messaging -breaker -climates -hospitalized -1752 -torino -afternoons -winfield -witnessing -##teacher -breakers -choirs -sawmill -coldly -##ege -sipping -haste -uninhabited -conical -bibliography -pamphlets -severn -edict -##oca -deux -illnesses -grips -##pl -rehearsals -sis -thinkers -tame -##keepers -1690 -acacia -reformer -##osed -##rys -shuffling -##iring -##shima -eastbound -ionic -rhea -flees -littered -##oum -rocker -vomiting -groaning -champ -overwhelmingly -civilizations -paces -sloop -adoptive -##tish -skaters -##vres -aiding -mango -##joy -nikola -shriek -##ignon -pharmaceuticals -##mg -tuna -calvert -gustavo -stocked -yearbook -##urai -##mana -computed -subsp -riff -hanoi -kelvin -hamid -moors -pastures -summons -jihad -nectar -##ctors -bayou -untitled -pleasing -vastly -republics -intellect -##η -##ulio -##tou -crumbling -stylistic -sb -##ی -consolation -frequented -h₂o -walden -widows -##iens -404 -##ignment -chunks -improves -288 -grit -recited -##dev -snarl -sociological -##arte -##gul -inquired -##held -bruise -clube -consultancy -homogeneous -hornets -multiplication -pasta -prick -savior -##grin -##kou -##phile -yoon -##gara -grimes -vanishing -cheering -reacting -bn -distillery -##quisite -##vity -coe -dockyard -massif -##jord -escorts -voss -##valent -byte -chopped -hawke -illusions -workings -floats -##koto -##vac -kv -annapolis -madden -##onus -alvaro -noctuidae -##cum -##scopic -avenge -steamboat -forte -illustrates -erika -##trip -570 -dew -nationalities -bran -manifested -thirsty -diversified -muscled -reborn -##standing -arson -##lessness -##dran -##logram -##boys -##kushima -##vious -willoughby -##phobia -286 -alsace -dashboard -yuki -##chai -granville -myspace -publicized -tricked -##gang -adjective -##ater -relic -reorganisation -enthusiastically -indications -saxe -##lassified -consolidate -iec -padua -helplessly -ramps -renaming -regulars -pedestrians -accents -convicts -inaccurate -lowers -mana -##pati -barrie -bjp -outta -someplace -berwick -flanking -invoked -marrow -sparsely -excerpts -clothed -rei -##ginal -wept -##straße -##vish -alexa -excel -##ptive -membranes -aquitaine -creeks -cutler -sheppard -implementations -ns -##dur -fragrance -budge -concordia -magnesium -marcelo -##antes -gladly -vibrating -##rral -##ggles -montrose -##omba -lew -seamus -1630 -cocky -##ament -##uen -bjorn -##rrick -fielder -fluttering -##lase -methyl -kimberley -mcdowell -reductions -barbed -##jic -##tonic -aeronautical -condensed -distracting -##promising -huffed -##cala -##sle -claudius -invincible -missy -pious -balthazar -ci -##lang -butte -combo -orson -##dication -myriad -1707 -silenced -##fed -##rh -coco -netball -yourselves -##oza -clarify -heller -peg -durban -etudes -offender -roast -blackmail -curvature -##woods -vile -309 -illicit -suriname -##linson -overture -1685 -bubbling -gymnast -tucking -##mming -##ouin -maldives -##bala -gurney -##dda -##eased -##oides -backside -pinto -jars -racehorse -tending -##rdial -baronetcy -wiener -duly -##rke -barbarian -cupping -flawed -##thesis -bertha 
-pleistocene -puddle -swearing -##nob -##tically -fleeting -prostate -amulet -educating -##mined -##iti -##tler -75th -jens -respondents -analytics -cavaliers -papacy -raju -##iente -##ulum -##tip -funnel -271 -disneyland -##lley -sociologist -##iam -2500 -faulkner -louvre -menon -##dson -276 -##ower -afterlife -mannheim -peptide -referees -comedians -meaningless -##anger -##laise -fabrics -hurley -renal -sleeps -##bour -##icle -breakout -kristin -roadside -animator -clover -disdain -unsafe -redesign -##urity -firth -barnsley -portage -reset -narrows -268 -commandos -expansive -speechless -tubular -##lux -essendon -eyelashes -smashwords -##yad -##bang -##claim -craved -sprinted -chet -somme -astor -wrocław -orton -266 -bane -##erving -##uing -mischief -##amps -##sund -scaling -terre -##xious -impairment -offenses -undermine -moi -soy -contiguous -arcadia -inuit -seam -##tops -macbeth -rebelled -##icative -##iot -590 -elaborated -frs -uniformed -##dberg -259 -powerless -priscilla -stimulated -980 -qc -arboretum -frustrating -trieste -bullock -##nified -enriched -glistening -intern -##adia -locus -nouvelle -ollie -ike -lash -starboard -ee -tapestry -headlined -hove -rigged -##vite -pollock -##yme -thrive -clustered -cas -roi -gleamed -olympiad -##lino -pressured -regimes -##hosis -##lick -ripley -##ophone -kickoff -gallon -rockwell -##arable -crusader -glue -revolutions -scrambling -1714 -grover -##jure -englishman -aztec -263 -contemplating -coven -ipad -preach -triumphant -tufts -##esian -rotational -##phus -328 -falkland -##brates -strewn -clarissa -rejoin -environmentally -glint -banded -drenched -moat -albanians -johor -rr -maestro -malley -nouveau -shaded -taxonomy -v6 -adhere -bunk -airfields -##ritan -1741 -encompass -remington -tran -##erative -amelie -mazda -friar -morals -passions -##zai -breadth -vis -##hae -argus -burnham -caressing -insider -rudd -##imov -##mini -##rso -italianate -murderous -textual -wainwright -armada -bam -weave -timer -##taken -##nh -fra -##crest -ardent -salazar -taps -tunis -##ntino -allegro -gland -philanthropic -##chester -implication -##optera -esq -judas -noticeably -wynn -##dara -inched -indexed -crises -villiers -bandit -royalties -patterned -cupboard -interspersed -accessory -isla -kendrick -entourage -stitches -##esthesia -headwaters -##ior -interlude -distraught -draught -1727 -##basket -biased -sy -transient -triad -subgenus -adapting -kidd -shortstop -##umatic -dimly -spiked -mcleod -reprint -nellie -pretoria -windmill -##cek -singled -##mps -273 -reunite -##orous -747 -bankers -outlying -##omp -##ports -##tream -apologies -cosmetics -patsy -##deh -##ocks -##yson -bender -nantes -serene -##nad -lucha -mmm -323 -##cius -##gli -cmll -coinage -nestor -juarez -##rook -smeared -sprayed -twitching -sterile -irina -embodied -juveniles -enveloped -miscellaneous -cancers -dq -gulped -luisa -crested -swat -donegal -ref -##anov -##acker -hearst -mercantile -##lika -doorbell -ua -vicki -##alla -##som -bilbao -psychologists -stryker -sw -horsemen -turkmenistan -wits -##national -anson -mathew -screenings -##umb -rihanna -##agne -##nessy -aisles -##iani -##osphere -hines -kenton -saskatoon -tasha -truncated -##champ -##itan -mildred -advises -fredrik -interpreting -inhibitors -##athi -spectroscopy -##hab -##kong -karim -panda -##oia -##nail -##vc -conqueror -kgb -leukemia -##dity -arrivals -cheered -pisa -phosphorus -shielded -##riated -mammal -unitarian -urgently -chopin -sanitary -##mission -spicy -drugged -hinges -##tort -tipping -trier -impoverished 
-westchester -##caster -267 -epoch -nonstop -##gman -##khov -aromatic -centrally -cerro -##tively -##vio -billions -modulation -sedimentary -283 -facilitating -outrageous -goldstein -##eak -##kt -ld -maitland -penultimate -pollard -##dance -fleets -spaceship -vertebrae -##nig -alcoholism -als -recital -##bham -##ference -##omics -m2 -##bm -trois -##tropical -##в -commemorates -##meric -marge -##raction -1643 -670 -cosmetic -ravaged -##ige -catastrophe -eng -##shida -albrecht -arterial -bellamy -decor -harmon -##rde -bulbs -synchronized -vito -easiest -shetland -shielding -wnba -##glers -##ssar -##riam -brianna -cumbria -##aceous -##rard -cores -thayer -##nsk -brood -hilltop -luminous -carts -keynote -larkin -logos -##cta -##ا -##mund -##quay -lilith -tinted -277 -wrestle -mobilization -##uses -sequential -siam -bloomfield -takahashi -274 -##ieving -presenters -ringo -blazed -witty -##oven -##ignant -devastation -haydn -harmed -newt -therese -##peed -gershwin -molina -rabbis -sudanese -001 -innate -restarted -##sack -##fus -slices -wb -##shah -enroll -hypothetical -hysterical -1743 -fabio -indefinite -warped -##hg -exchanging -525 -unsuitable -##sboro -gallo -1603 -bret -cobalt -homemade -##hunter -mx -operatives -##dhar -terraces -durable -latch -pens -whorls -##ctuated -##eaux -billing -ligament -succumbed -##gly -regulators -spawn -##brick -##stead -filmfare -rochelle -##nzo -1725 -circumstance -saber -supplements -##nsky -##tson -crowe -wellesley -carrot -##9th -##movable -primate -drury -sincerely -topical -##mad -##rao -callahan -kyiv -smarter -tits -undo -##yeh -announcements -anthologies -barrio -nebula -##islaus -##shaft -##tyn -bodyguards -2021 -assassinate -barns -emmett -scully -##mah -##yd -##eland -##tino -##itarian -demoted -gorman -lashed -prized -adventist -writ -##gui -alla -invertebrates -##ausen -1641 -amman -1742 -align -healy -redistribution -##gf -##rize -insulation -##drop -adherents -hezbollah -vitro -ferns -yanking -269 -php -registering -uppsala -cheerleading -confines -mischievous -tully -##ross -49th -docked -roam -stipulated -pumpkin -##bry -prompt -##ezer -blindly -shuddering -craftsmen -frail -scented -katharine -scramble -shaggy -sponge -helix -zaragoza -279 -##52 -43rd -backlash -fontaine -seizures -posse -cowan -nonfiction -telenovela -wwii -hammered -undone -##gpur -encircled -irs -##ivation -artefacts -oneself -searing -smallpox -##belle -##osaurus -shandong -breached -upland -blushing -rankin -infinitely -psyche -tolerated -docking -evicted -##col -unmarked -##lving -gnome -lettering -litres -musique -##oint -benevolent -##jal -blackened -##anna -mccall -racers -tingle -##ocene -##orestation -introductions -radically -292 -##hiff -##باد -1610 -1739 -munchen -plead -##nka -condo -scissors -##sight -##tens -apprehension -##cey -##yin -hallmark -watering -formulas -sequels -##llas -aggravated -bae -commencing -##building -enfield -prohibits -marne -vedic -civilized -euclidean -jagger -beforehand -blasts -dumont -##arney -##nem -740 -conversions -hierarchical -rios -simulator -##dya -##lellan -hedges -oleg -thrusts -shadowed -darby -maximize -1744 -gregorian -##nded -##routed -sham -unspecified -##hog -emory -factual -##smo -##tp -fooled -##rger -ortega -wellness -marlon -##oton -##urance -casket -keating -ley -enclave -##ayan -char -influencing -jia -##chenko -412 -ammonia -erebidae -incompatible -violins -cornered -##arat -grooves -astronauts -columbian -rampant -fabrication -kyushu -mahmud -vanish -##dern -mesopotamia -##lete -ict -##rgen -caspian -kenji 
-pitted -##vered -999 -grimace -roanoke -tchaikovsky -twinned -##analysis -##awan -xinjiang -arias -clemson -kazakh -sizable -1662 -##khand -##vard -plunge -tatum -vittorio -##nden -cholera -##dana -##oper -bracing -indifference -projectile -superliga -##chee -realises -upgrading -299 -porte -retribution -##vies -nk -stil -##resses -ama -bureaucracy -blackberry -bosch -testosterone -collapses -greer -##pathic -ioc -fifties -malls -##erved -bao -baskets -adolescents -siegfried -##osity -##tosis -mantra -detecting -existent -fledgling -##cchi -dissatisfied -gan -telecommunication -mingled -sobbed -6000 -controversies -outdated -taxis -##raus -fright -slams -##lham -##fect -##tten -detectors -fetal -tanned -##uw -fray -goth -olympian -skipping -mandates -scratches -sheng -unspoken -hyundai -tracey -hotspur -restrictive -##buch -americana -mundo -##bari -burroughs -diva -vulcan -##6th -distinctions -thumping -##ngen -mikey -sheds -fide -rescues -springsteen -vested -valuation -##ece -##ely -pinnacle -rake -sylvie -##edo -almond -quivering -##irus -alteration -faltered -##wad -51st -hydra -ticked -##kato -recommends -##dicated -antigua -arjun -stagecoach -wilfred -trickle -pronouns -##pon -aryan -nighttime -##anian -gall -pea -stitch -##hei -leung -milos -##dini -eritrea -nexus -starved -snowfall -kant -parasitic -cot -discus -hana -strikers -appleton -kitchens -##erina -##partisan -##itha -##vius -disclose -metis -##channel -1701 -tesla -##vera -fitch -1735 -blooded -##tila -decimal -##tang -##bai -cyclones -eun -bottled -peas -pensacola -basha -bolivian -crabs -boil -lanterns -partridge -roofed -1645 -necks -##phila -opined -patting -##kla -##lland -chuckles -volta -whereupon -##nche -devout -euroleague -suicidal -##dee -inherently -involuntary -knitting -nasser -##hide -puppets -colourful -courageous -southend -stills -miraculous -hodgson -richer -rochdale -ethernet -greta -uniting -prism -umm -##haya -##itical -##utation -deterioration -pointe -prowess -##ropriation -lids -scranton -billings -subcontinent -##koff -##scope -brute -kellogg -psalms -degraded -##vez -stanisław -##ructured -ferreira -pun -astonishing -gunnar -##yat -arya -prc -gottfried -##tight -excursion -##ographer -dina -##quil -##nare -huffington -illustrious -wilbur -gundam -verandah -##zard -naacp -##odle -constructive -fjord -kade -##naud -generosity -thrilling -baseline -cayman -frankish -plastics -accommodations -zoological -##fting -cedric -qb -motorized -##dome -##otted -squealed -tackled -canucks -budgets -situ -asthma -dail -gabled -grasslands -whimpered -writhing -judgments -##65 -minnie -pv -##carbon -bananas -grille -domes -monique -odin -maguire -markham -tierney -##estra -##chua -libel -poke -speedy -atrium -laval -notwithstanding -##edly -fai -kala -##sur -robb -##sma -listings -luz -supplementary -tianjin -##acing -enzo -jd -ric -scanner -croats -transcribed -##49 -arden -cv -##hair -##raphy -##lver -##uy -357 -seventies -staggering -alam -horticultural -hs -regression -timbers -blasting -##ounded -montagu -manipulating -##cit -catalytic -1550 -troopers -##meo -condemnation -fitzpatrick -##oire -##roved -inexperienced -1670 -castes -##lative -outing -314 -dubois -flicking -quarrel -ste -learners -1625 -iq -whistled -##class -282 -classify -tariffs -temperament -355 -folly -liszt -##yles -immersed -jordanian -ceasefire -apparel -extras -maru -fished -##bio -harta -stockport -assortment -craftsman -paralysis -transmitters -##cola -blindness -##wk -fatally -proficiency -solemnly -##orno -repairing -amore 
-groceries -ultraviolet -##chase -schoolhouse -##tua -resurgence -nailed -##otype -##× -ruse -saliva -diagrams -##tructing -albans -rann -thirties -1b -antennas -hilarious -cougars -paddington -stats -##eger -breakaway -ipod -reza -authorship -prohibiting -scoffed -##etz -##ttle -conscription -defected -trondheim -##fires -ivanov -keenan -##adan -##ciful -##fb -##slow -locating -##ials -##tford -cadiz -basalt -blankly -interned -rags -rattling -##tick -carpathian -reassured -sync -bum -guildford -iss -staunch -##onga -astronomers -sera -sofie -emergencies -susquehanna -##heard -duc -mastery -vh1 -williamsburg -bayer -buckled -craving -##khan -##rdes -bloomington -##write -alton -barbecue -##bians -justine -##hri -##ndt -delightful -smartphone -newtown -photon -retrieval -peugeot -hissing -##monium -##orough -flavors -lighted -relaunched -tainted -##games -##lysis -anarchy -microscopic -hopping -adept -evade -evie -##beau -inhibit -sinn -adjustable -hurst -intuition -wilton -cisco -44th -lawful -lowlands -stockings -thierry -##dalen -##hila -##nai -fates -prank -tb -maison -lobbied -provocative -1724 -4a -utopia -##qual -carbonate -gujarati -purcell -##rford -curtiss -##mei -overgrown -arenas -mediation -swallows -##rnik -respectful -turnbull -##hedron -##hope -alyssa -ozone -##ʻi -ami -gestapo -johansson -snooker -canteen -cuff -declines -empathy -stigma -##ags -##iner -##raine -taxpayers -gui -volga -##wright -##copic -lifespan -overcame -tattooed -enactment -giggles -##ador -##camp -barrington -bribe -obligatory -orbiting -peng -##enas -elusive -sucker -##vating -cong -hardship -empowered -anticipating -estrada -cryptic -greasy -detainees -planck -sudbury -plaid -dod -marriott -kayla -##ears -##vb -##zd -mortally -##hein -cognition -radha -319 -liechtenstein -meade -richly -argyle -harpsichord -liberalism -trumpets -lauded -tyrant -salsa -tiled -lear -promoters -reused -slicing -trident -##chuk -##gami -##lka -cantor -checkpoint -##points -gaul -leger -mammalian -##tov -##aar -##schaft -doha -frenchman -nirvana -##vino -delgado -headlining -##eron -##iography -jug -tko -1649 -naga -intersections -##jia -benfica -nawab -##suka -ashford -gulp -##deck -##vill -##rug -brentford -frazier -pleasures -dunne -potsdam -shenzhen -dentistry -##tec -flanagan -##dorff -##hear -chorale -dinah -prem -quezon -##rogated -relinquished -sutra -terri -##pani -flaps -##rissa -poly -##rnet -homme -aback -##eki -linger -womb -##kson -##lewood -doorstep -orthodoxy -threaded -westfield -##rval -dioceses -fridays -subsided -##gata -loyalists -##biotic -##ettes -letterman -lunatic -prelate -tenderly -invariably -souza -thug -winslow -##otide -furlongs -gogh -jeopardy -##runa -pegasus -##umble -humiliated -standalone -tagged -##roller -freshmen -klan -##bright -attaining -initiating -transatlantic -logged -viz -##uance -1723 -combatants -intervening -stephane -chieftain -despised -grazed -317 -cdc -galveston -godzilla -macro -simulate -##planes -parades -##esses -960 -##ductive -##unes -equator -overdose -##cans -##hosh -##lifting -joshi -epstein -sonora -treacherous -aquatics -manchu -responsive -##sation -supervisory -##christ -##llins -##ibar -##balance -##uso -kimball -karlsruhe -mab -##emy -ignores -phonetic -reuters -spaghetti -820 -almighty -danzig -rumbling -tombstone -designations -lured -outset -##felt -supermarkets -##wt -grupo -kei -kraft -susanna -##blood -comprehension -genealogy -##aghan -##verted -redding -##ythe -1722 -bowing -##pore -##roi -lest -sharpened -fulbright -valkyrie -sikhs -##unds 
-swans -bouquet -merritt -##tage -##venting -commuted -redhead -clerks -leasing -cesare -dea -hazy -##vances -fledged -greenfield -servicemen -##gical -armando -blackout -dt -sagged -downloadable -intra -potion -pods -##4th -##mism -xp -attendants -gambia -stale -##ntine -plump -asteroids -rediscovered -buds -flea -hive -##neas -1737 -classifications -debuts -##eles -olympus -scala -##eurs -##gno -##mute -hummed -sigismund -visuals -wiggled -await -pilasters -clench -sulfate -##ances -bellevue -enigma -trainee -snort -##sw -clouded -denim -##rank -##rder -churning -hartman -lodges -riches -sima -##missible -accountable -socrates -regulates -mueller -##cr -1702 -avoids -solids -himalayas -nutrient -pup -##jevic -squat -fades -nec -##lates -##pina -##rona -##ου -privateer -tequila -##gative -##mpton -apt -hornet -immortals -##dou -asturias -cleansing -dario -##rries -##anta -etymology -servicing -zhejiang -##venor -##nx -horned -erasmus -rayon -relocating -£10 -##bags -escalated -promenade -stubble -2010s -artisans -axial -liquids -mora -sho -yoo -##tsky -bundles -oldies -##nally -notification -bastion -##ths -sparkle -##lved -1728 -leash -pathogen -highs -##hmi -immature -880 -gonzaga -ignatius -mansions -monterrey -sweets -bryson -##loe -polled -regatta -brightest -pei -rosy -squid -hatfield -payroll -addict -meath -cornerback -heaviest -lodging -##mage -capcom -rippled -##sily -barnet -mayhem -ymca -snuggled -rousseau -##cute -blanchard -284 -fragmented -leighton -chromosomes -risking -##md -##strel -##utter -corinne -coyotes -cynical -hiroshi -yeomanry -##ractive -ebook -grading -mandela -plume -agustin -magdalene -##rkin -bea -femme -trafford -##coll -##lun -##tance -52nd -fourier -upton -##mental -camilla -gust -iihf -islamabad -longevity -##kala -feldman -netting -##rization -endeavour -foraging -mfa -orr -##open -greyish -contradiction -graz -##ruff -handicapped -marlene -tweed -oaxaca -spp -campos -miocene -pri -configured -cooks -pluto -cozy -pornographic -##entes -70th -fairness -glided -jonny -lynne -rounding -sired -##emon -##nist -remade -uncover -##mack -complied -lei -newsweek -##jured -##parts -##enting -##pg -293 -finer -guerrillas -athenian -deng -disused -stepmother -accuse -gingerly -seduction -521 -confronting -##walker -##going -gora -nostalgia -sabres -virginity -wrenched -##minated -syndication -wielding -eyre -##56 -##gnon -##igny -behaved -taxpayer -sweeps -##growth -childless -gallant -##ywood -amplified -geraldine -scrape -##ffi -babylonian -fresco -##rdan -##kney -##position -1718 -restricting -tack -fukuoka -osborn -selector -partnering -##dlow -318 -gnu -kia -tak -whitley -gables -##54 -##mania -mri -softness -immersion -##bots -##evsky -1713 -chilling -insignificant -pcs -##uis -elites -lina -purported -supplemental -teaming -##americana -##dding -##inton -proficient -rouen -##nage -##rret -niccolo -selects -##bread -fluffy -1621 -gruff -knotted -mukherjee -polgara -thrash -nicholls -secluded -smoothing -thru -corsica -loaf -whitaker -inquiries -##rrier -##kam -indochina -289 -marlins -myles -peking -##tea -extracts -pastry -superhuman -connacht -vogel -##ditional -##het -##udged -##lash -gloss -quarries -refit -teaser -##alic -##gaon -20s -materialized -sling -camped -pickering -tung -tracker -pursuant -##cide -cranes -soc -##cini -##typical -##viere -anhalt -overboard -workout -chores -fares -orphaned -stains -##logie -fenton -surpassing -joyah -triggers -##itte -grandmaster -##lass -##lists -clapping -fraudulent -ledger -nagasaki -##cor -##nosis -##tsa 
-eucalyptus -tun -##icio -##rney -##tara -dax -heroism -ina -wrexham -onboard -unsigned -##dates -moshe -galley -winnie -droplets -exiles -praises -watered -noodles -##aia -fein -adi -leland -multicultural -stink -bingo -comets -erskine -modernized -canned -constraint -domestically -chemotherapy -featherweight -stifled -##mum -darkly -irresistible -refreshing -hasty -isolate -##oys -kitchener -planners -##wehr -cages -yarn -implant -toulon -elects -childbirth -yue -##lind -##lone -cn -rightful -sportsman -junctions -remodeled -specifies -##rgh -291 -##oons -complimented -##urgent -lister -ot -##logic -bequeathed -cheekbones -fontana -gabby -##dial -amadeus -corrugated -maverick -resented -triangles -##hered -##usly -nazareth -tyrol -1675 -assent -poorer -sectional -aegean -##cous -296 -nylon -ghanaian -##egorical -##weig -cushions -forbid -fusiliers -obstruction -somerville -##scia -dime -earrings -elliptical -leyte -oder -polymers -timmy -atm -midtown -piloted -settles -continual -externally -mayfield -##uh -enrichment -henson -keane -persians -1733 -benji -braden -pep -324 -##efe -contenders -pepsi -valet -##isches -298 -##asse -##earing -goofy -stroll -##amen -authoritarian -occurrences -adversary -ahmedabad -tangent -toppled -dorchester -1672 -modernism -marxism -islamist -charlemagne -exponential -racks -unicode -brunette -mbc -pic -skirmish -##bund -##lad -##powered -##yst -hoisted -messina -shatter -##ctum -jedi -vantage -##music -##neil -clemens -mahmoud -corrupted -authentication -lowry -nils -##washed -omnibus -wounding -jillian -##itors -##opped -serialized -narcotics -handheld -##arm -##plicity -intersecting -stimulating -##onis -crate -fellowships -hemingway -casinos -climatic -fordham -copeland -drip -beatty -leaflets -robber -brothel -madeira -##hedral -sphinx -ultrasound -##vana -valor -forbade -leonid -villas -##aldo -duane -marquez -##cytes -disadvantaged -forearms -kawasaki -reacts -consular -lax -uncles -uphold -##hopper -concepcion -dorsey -lass -##izan -arching -passageway -1708 -researches -tia -internationals -##graphs -##opers -distinguishes -javanese -divert -##uven -plotted -##listic -##rwin -##erik -##tify -affirmative -signifies -validation -##bson -kari -felicity -georgina -zulu -##eros -##rained -##rath -overcoming -##dot -argyll -##rbin -1734 -chiba -ratification -windy -earls -parapet -##marks -hunan -pristine -astrid -punta -##gart -brodie -##kota -##oder -malaga -minerva -rouse -##phonic -bellowed -pagoda -portals -reclamation -##gur -##odies -##⁄₄ -parentheses -quoting -allergic -palette -showcases -benefactor -heartland -nonlinear -##tness -bladed -cheerfully -scans -##ety -##hone -1666 -girlfriends -pedersen -hiram -sous -##liche -##nator -1683 -##nery -##orio -##umen -bobo -primaries -smiley -##cb -unearthed -uniformly -fis -metadata -1635 -ind -##oted -recoil -##titles -##tura -##ια -406 -hilbert -jamestown -mcmillan -tulane -seychelles -##frid -antics -coli -fated -stucco -##grants -1654 -bulky -accolades -arrays -caledonian -carnage -optimism -puebla -##tative -##cave -enforcing -rotherham -seo -dunlop -aeronautics -chimed -incline -zoning -archduke -hellenistic -##oses -##sions -candi -thong -##ople -magnate -rustic -##rsk -projective -slant -##offs -danes -hollis -vocalists -##ammed -congenital -contend -gesellschaft -##ocating -##pressive -douglass -quieter -##cm -##kshi -howled -salim -spontaneously -townsville -buena -southport -##bold -kato -1638 -faerie -stiffly -##vus -##rled -297 -flawless -realising -taboo -##7th -bytes -straightening 
-356 -jena -##hid -##rmin -cartwright -berber -bertram -soloists -411 -noses -417 -coping -fission -hardin -inca -##cen -1717 -mobilized -vhf -##raf -biscuits -curate -##85 -##anial -331 -gaunt -neighbourhoods -1540 -##abas -blanca -bypassed -sockets -behold -coincidentally -##bane -nara -shave -splinter -terrific -##arion -##erian -commonplace -juris -redwood -waistband -boxed -caitlin -fingerprints -jennie -naturalized -##ired -balfour -craters -jody -bungalow -hugely -quilt -glitter -pigeons -undertaker -bulging -constrained -goo -##sil -##akh -assimilation -reworked -##person -persuasion -##pants -felicia -##cliff -##ulent -1732 -explodes -##dun -##inium -##zic -lyman -vulture -hog -overlook -begs -northwards -ow -spoil -##urer -fatima -favorably -accumulate -sargent -sorority -corresponded -dispersal -kochi -toned -##imi -##lita -internacional -newfound -##agger -##lynn -##rigue -booths -peanuts -##eborg -medicare -muriel -nur -##uram -crates -millennia -pajamas -worsened -##breakers -jimi -vanuatu -yawned -##udeau -carousel -##hony -hurdle -##ccus -##mounted -##pod -rv -##eche -airship -ambiguity -compulsion -recapture -##claiming -arthritis -##osomal -1667 -asserting -ngc -sniffing -dade -discontent -glendale -ported -##amina -defamation -rammed -##scent -fling -livingstone -##fleet -875 -##ppy -apocalyptic -comrade -lcd -##lowe -cessna -eine -persecuted -subsistence -demi -hoop -reliefs -710 -coptic -progressing -stemmed -perpetrators -1665 -priestess -##nio -dobson -ebony -rooster -itf -tortricidae -##bbon -##jian -cleanup -##jean -##øy -1721 -eighties -taxonomic -holiness -##hearted -##spar -antilles -showcasing -stabilized -##nb -gia -mascara -michelangelo -dawned -##uria -##vinsky -extinguished -fitz -grotesque -£100 -##fera -##loid -##mous -barges -neue -throbbed -cipher -johnnie -##a1 -##mpt -outburst -##swick -spearheaded -administrations -c1 -heartbreak -pixels -pleasantly -##enay -lombardy -plush -##nsed -bobbie -##hly -reapers -tremor -xiang -minogue -substantive -hitch -barak -##wyl -kwan -##encia -910 -obscene -elegance -indus -surfer -bribery -conserve -##hyllum -##masters -horatio -##fat -apes -rebound -psychotic -##pour -iteration -##mium -##vani -botanic -horribly -antiques -dispose -paxton -##hli -##wg -timeless -1704 -disregard -engraver -hounds -##bau -##version -looted -uno -facilitates -groans -masjid -rutland -antibody -disqualification -decatur -footballers -quake -slacks -48th -rein -scribe -stabilize -commits -exemplary -tho -##hort -##chison -pantry -traversed -##hiti -disrepair -identifiable -vibrated -baccalaureate -##nnis -csa -interviewing -##iensis -##raße -greaves -wealthiest -343 -classed -jogged -£5 -##58 -##atal -illuminating -knicks -respecting -##uno -scrubbed -##iji -##dles -kruger -moods -growls -raider -silvia -chefs -kam -vr -cree -percival -##terol -gunter -counterattack -defiant -henan -ze -##rasia -##riety -equivalence -submissions -##fra -##thor -bautista -mechanically -##heater -cornice -herbal -templar -##mering -outputs -ruining -ligand -renumbered -extravagant -mika -blockbuster -eta -insurrection -##ilia -darkening -ferocious -pianos -strife -kinship -##aer -melee -##anor -##iste -##may -##oue -decidedly -weep -##jad -##missive -##ppel -354 -puget -unease -##gnant -1629 -hammering -kassel -ob -wessex -##lga -bromwich -egan -paranoia -utilization -##atable -##idad -contradictory -provoke -##ols -##ouring -##tangled -knesset -##very -##lette -plumbing -##sden -##¹ -greensboro -occult -sniff -338 -zev -beaming -gamer -haggard -mahal 
-##olt -##pins -mendes -utmost -briefing -gunnery -##gut -##pher -##zh -##rok -1679 -khalifa -sonya -##boot -principals -urbana -wiring -##liffe -##minating -##rrado -dahl -nyu -skepticism -np -townspeople -ithaca -lobster -somethin -##fur -##arina -##−1 -freighter -zimmerman -biceps -contractual -##herton -amend -hurrying -subconscious -##anal -336 -meng -clermont -spawning -##eia -##lub -dignitaries -impetus -snacks -spotting -twigs -##bilis -##cz -##ouk -libertadores -nic -skylar -##aina -##firm -gustave -asean -##anum -dieter -legislatures -flirt -bromley -trolls -umar -##bbies -##tyle -blah -parc -bridgeport -crank -negligence -##nction -46th -constantin -molded -bandages -seriousness -00pm -siegel -carpets -compartments -upbeat -statehood -##dner -##edging -marko -730 -platt -##hane -paving -##iy -1738 -abbess -impatience -limousine -nbl -##talk -441 -lucille -mojo -nightfall -robbers -##nais -karel -brisk -calves -replicate -ascribed -telescopes -##olf -intimidated -##reen -ballast -specialization -##sit -aerodynamic -caliphate -rainer -visionary -##arded -epsilon -##aday -##onte -aggregation -auditory -boosted -reunification -kathmandu -loco -robyn -402 -acknowledges -appointing -humanoid -newell -redeveloped -restraints -##tained -barbarians -chopper -1609 -italiana -##lez -##lho -investigates -wrestlemania -##anies -##bib -690 -##falls -creaked -dragoons -gravely -minions -stupidity -volley -##harat -##week -musik -##eries -##uously -fungal -massimo -semantics -malvern -##ahl -##pee -discourage -embryo -imperialism -1910s -profoundly -##ddled -jiangsu -sparkled -stat -##holz -sweatshirt -tobin -##iction -sneered -##cheon -##oit -brit -causal -smyth -##neuve -diffuse -perrin -silvio -##ipes -##recht -detonated -iqbal -selma -##nism -##zumi -roasted -##riders -tay -##ados -##mament -##mut -##rud -840 -completes -nipples -cfa -flavour -hirsch -##laus -calderon -sneakers -moravian -##ksha -1622 -rq -294 -##imeters -bodo -##isance -##pre -##ronia -anatomical -excerpt -##lke -dh -kunst -##tablished -##scoe -biomass -panted -unharmed -gael -housemates -montpellier -##59 -coa -rodents -tonic -hickory -singleton -##taro -451 -1719 -aldo -breaststroke -dempsey -och -rocco -##cuit -merton -dissemination -midsummer -serials -##idi -haji -polynomials -##rdon -gs -enoch -prematurely -shutter -taunton -£3 -##grating -##inates -archangel -harassed -##asco -326 -archway -dazzling -##ecin -1736 -sumo -wat -##kovich -1086 -honneur -##ently -##nostic -##ttal -##idon -1605 -403 -1716 -blogger -rents -##gnan -hires -##ikh -##dant -howie -##rons -handler -retracted -shocks -1632 -arun -duluth -kepler -trumpeter -##lary -peeking -seasoned -trooper -##mara -laszlo -##iciencies -##rti -heterosexual -##inatory -##ssion -indira -jogging -##inga -##lism -beit -dissatisfaction -malice -##ately -nedra -peeling -##rgeon -47th -stadiums -475 -vertigo -##ains -iced -restroom -##plify -##tub -illustrating -pear -##chner -##sibility -inorganic -rappers -receipts -watery -##kura -lucinda -##oulos -reintroduced -##8th -##tched -gracefully -saxons -nutritional -wastewater -rained -favourites -bedrock -fisted -hallways -likeness -upscale -##lateral -1580 -blinds -prequel -##pps -##tama -deter -humiliating -restraining -tn -vents -1659 -laundering -recess -rosary -tractors -coulter -federer -##ifiers -##plin -persistence -##quitable -geschichte -pendulum -quakers -##beam -bassett -pictorial -buffet -koln -##sitor -drills -reciprocal -shooters -##57 -##cton -##tees -converge -pip -dmitri -donnelly -yamamoto -aqua -azores 
[... remainder of the deleted WordPiece vocabulary: the flattened span above corresponds to the final `-`-prefixed deletion lines of the 30,522-entry file `PyTorch/LanguageModeling/BERT/vocab/vocab`. The deleted entries comprise whole-word tokens, `##`-prefixed subword continuation pieces, punctuation pieces, and non-Latin character pieces (Greek, Cyrillic, Armenian, Hebrew, Arabic, Devanagari, Bengali, Tamil, Thai, Tibetan, Georgian, Hangul jamo, kana, and CJK), ending with `##~` ...]
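For context on the deleted file: the `##` prefix on many of the entries above marks WordPiece continuation pieces, i.e. subword units that may only appear in the interior of a word, which is why the vocabulary contains both standalone tokens and `##`-prefixed fragments. A minimal sketch of the greedy longest-match WordPiece algorithm is below; the tiny `vocab` set and the function name are illustrative assumptions, not the repository's tokenizer (the real BERT tokenizer additionally lowercases, strips accents, and splits on punctuation before applying WordPiece against the full 30,522-entry file removed by this patch):

```python
# Minimal sketch of greedy longest-match WordPiece tokenization.
# `vocab` here is a toy stand-in for the deleted 30,522-entry file.

def wordpiece_tokenize(word, vocab, unk="[UNK]"):
    pieces, start = [], 0
    while start < len(word):
        end = len(word)
        cur = None
        # Find the longest vocab entry matching at `start`;
        # non-initial pieces carry the "##" continuation prefix.
        while start < end:
            piece = word[start:end]
            if start > 0:
                piece = "##" + piece
            if piece in vocab:
                cur = piece
                break
            end -= 1
        if cur is None:
            return [unk]  # no piece matches: emit the unknown token
        pieces.append(cur)
        start = end
    return pieces

vocab = {"un", "##aff", "##able", "##e", "##d", "affable"}
print(wordpiece_tokenize("unaffable", vocab))  # ['un', '##aff', '##able']
```

The greedy longest-match rule is what makes the vocabulary ordering in the file irrelevant to segmentation: only membership matters, so deleting the checked-in copy (in favor of downloading the published vocab, per the new `data/GooglePretrainedWeightDownloader.py`) does not change tokenization behavior.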