From 489a6c5f684980e7dd49b589c4c6ba85e64c2306 Mon Sep 17 00:00:00 2001 From: chenxuanhong Date: Sat, 19 Mar 2022 00:43:03 +0800 Subject: [PATCH] arcface --- arcface_torch/.gitignore | 5 + arcface_torch/README.md | 136 + arcface_torch/backbones/__init__.py | 25 + arcface_torch/backbones/iresnet.py | 186 + arcface_torch/backbones/iresnet2060.py | 176 + arcface_torch/backbones/mobilefacenet.py | 130 + arcface_torch/configs/3millions.py | 22 + arcface_torch/configs/__init__.py | 0 arcface_torch/configs/base.py | 47 + .../configs/glint360k_mobileface_lr02_bs4k.py | 27 + .../glint360k_r100_lr02_bs4k_16gpus.py | 27 + .../configs/ms1mv3_mobileface_lr02.py | 27 + arcface_torch/configs/ms1mv3_r100_lr02.py | 27 + arcface_torch/configs/ms1mv3_r50_lr02.py | 27 + ...face42m_mobilefacenet_pfc02_bs8k_16gpus.py | 27 + .../webface42m_r100_lr01_pfc02_bs4k_16gpus.py | 27 + .../webface42m_r50_lr01_pfc02_bs4k_32gpus.py | 27 + .../webface42m_r50_lr01_pfc02_bs4k_8gpus.py | 27 + .../webface42m_r50_lr01_pfc02_bs8k_16gpus.py | 27 + arcface_torch/dataset.py | 209 + arcface_torch/docs/eval.md | 31 + arcface_torch/docs/install.md | 51 + arcface_torch/docs/install_dali.md | 1 + arcface_torch/docs/modelzoo.md | 0 arcface_torch/docs/prepare_webface42m.md | 22 + arcface_torch/docs/speed_benchmark.md | 93 + arcface_torch/eval/__init__.py | 0 arcface_torch/eval/verification.py | 409 + arcface_torch/eval_ijbc.py | 483 ++ arcface_torch/inference.py | 35 + arcface_torch/losses.py | 47 + arcface_torch/lr_scheduler.py | 29 + arcface_torch/onnx_helper.py | 250 + arcface_torch/onnx_ijbc.py | 269 + arcface_torch/partial_fc.py | 330 + arcface_torch/requirement.txt | 5 + arcface_torch/run.sh | 9 + arcface_torch/torch2onnx.py | 53 + arcface_torch/train.py | 161 + arcface_torch/utils/__init__.py | 0 arcface_torch/utils/plot.py | 71 + arcface_torch/utils/utils_callbacks.py | 110 + arcface_torch/utils/utils_config.py | 16 + arcface_torch/utils/utils_logging.py | 41 + breakpoint.json | 4 +- check_list.txt | 258 + 
test_arcface.py | 17 + train_scripts/trainer_multi_gpu.py | 12 +- vggface2hq_failed.txt | 6728 +++++++++++++++++ 49 files changed, 10736 insertions(+), 5 deletions(-) create mode 100644 arcface_torch/.gitignore create mode 100644 arcface_torch/README.md create mode 100644 arcface_torch/backbones/__init__.py create mode 100644 arcface_torch/backbones/iresnet.py create mode 100644 arcface_torch/backbones/iresnet2060.py create mode 100644 arcface_torch/backbones/mobilefacenet.py create mode 100644 arcface_torch/configs/3millions.py create mode 100644 arcface_torch/configs/__init__.py create mode 100644 arcface_torch/configs/base.py create mode 100644 arcface_torch/configs/glint360k_mobileface_lr02_bs4k.py create mode 100644 arcface_torch/configs/glint360k_r100_lr02_bs4k_16gpus.py create mode 100644 arcface_torch/configs/ms1mv3_mobileface_lr02.py create mode 100644 arcface_torch/configs/ms1mv3_r100_lr02.py create mode 100644 arcface_torch/configs/ms1mv3_r50_lr02.py create mode 100644 arcface_torch/configs/webface42m_mobilefacenet_pfc02_bs8k_16gpus.py create mode 100644 arcface_torch/configs/webface42m_r100_lr01_pfc02_bs4k_16gpus.py create mode 100644 arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_32gpus.py create mode 100644 arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_8gpus.py create mode 100644 arcface_torch/configs/webface42m_r50_lr01_pfc02_bs8k_16gpus.py create mode 100644 arcface_torch/dataset.py create mode 100644 arcface_torch/docs/eval.md create mode 100644 arcface_torch/docs/install.md create mode 100644 arcface_torch/docs/install_dali.md create mode 100644 arcface_torch/docs/modelzoo.md create mode 100644 arcface_torch/docs/prepare_webface42m.md create mode 100644 arcface_torch/docs/speed_benchmark.md create mode 100644 arcface_torch/eval/__init__.py create mode 100644 arcface_torch/eval/verification.py create mode 100644 arcface_torch/eval_ijbc.py create mode 100644 arcface_torch/inference.py create mode 100644 arcface_torch/losses.py create 
mode 100644 arcface_torch/lr_scheduler.py create mode 100644 arcface_torch/onnx_helper.py create mode 100644 arcface_torch/onnx_ijbc.py create mode 100644 arcface_torch/partial_fc.py create mode 100644 arcface_torch/requirement.txt create mode 100644 arcface_torch/run.sh create mode 100644 arcface_torch/torch2onnx.py create mode 100644 arcface_torch/train.py create mode 100644 arcface_torch/utils/__init__.py create mode 100644 arcface_torch/utils/plot.py create mode 100644 arcface_torch/utils/utils_callbacks.py create mode 100644 arcface_torch/utils/utils_config.py create mode 100644 arcface_torch/utils/utils_logging.py create mode 100644 test_arcface.py diff --git a/arcface_torch/.gitignore b/arcface_torch/.gitignore new file mode 100644 index 0000000..e3b9c68 --- /dev/null +++ b/arcface_torch/.gitignore @@ -0,0 +1,5 @@ +**__pycache__/ +.vscode +bak*/ +work_dirs/ +models/ \ No newline at end of file diff --git a/arcface_torch/README.md b/arcface_torch/README.md new file mode 100644 index 0000000..efe5c21 --- /dev/null +++ b/arcface_torch/README.md @@ -0,0 +1,136 @@ +# Distributed Arcface Training in Pytorch + +This is a deep learning library that makes face recognition efficient, and effective, which can train tens of millions +identity on a single server. + +## Requirements + +- Install [PyTorch](http://pytorch.org) (torch>=1.6.0), our doc for [install.md](docs/install.md). +- (Optional) Install [DALI](https://docs.nvidia.com/deeplearning/dali/user-guide/docs/), our doc for [install_dali.md](docs/install_dali.md). +- `pip install -r requirement.txt`. + +## How to Training + +To train a model, run `train.py` with the path to the configs. +The example commands below show how to run +distributed training. + +### 1. To run on a machine with 8 GPUs: + +```shell +python -m torch.distributed.launch --nproc_per_node=8 --nnodes=1 --node_rank=0 --master_addr="127.0.0.1" --master_port=12581 train.py configs/ms1mv3_r50_lr02 +``` + +### 2. 
To run on 2 machines with 8 GPUs each: + +Node 0: + +```shell +python -m torch.distributed.launch --nproc_per_node=8 --nnodes=2 --node_rank=0 --master_addr="ip1" --master_port=12581 train.py configs/webface42m_r100_lr01_pfc02_bs4k_16gpus +``` + +Node 1: + +```shell +python -m torch.distributed.launch --nproc_per_node=8 --nnodes=2 --node_rank=1 --master_addr="ip1" --master_port=12581 train.py configs/webface42m_r100_lr01_pfc02_bs4k_16gpus +``` + +## Download Datasets or Prepare Datasets + +- [MS1MV3](https://github.com/deepinsight/insightface/tree/master/recognition/_datasets_#ms1m-retinaface) (93k IDs, 5.2M images) +- [Glint360K](https://github.com/deepinsight/insightface/tree/master/recognition/partial_fc#4-download) (360k IDs, 17.1M images) +- [WebFace42M](docs/prepare_webface42m.md) (2M IDs, 42.5M images) + +## Model Zoo + +- The models are available for non-commercial research purposes only. +- All models can be found in here. +- [Baidu Yun Pan](https://pan.baidu.com/s/1CL-l4zWqsI1oDuEEYVhj-g): e8pw +- [OneDrive](https://1drv.ms/u/s!AswpsDO2toNKq0lWY69vN58GR6mw?e=p9Ov5d) + +### Performance on IJB-C and [**ICCV2021-MFR**](https://github.com/deepinsight/insightface/blob/master/challenges/mfr/README.md) + +ICCV2021-MFR testset consists of non-celebrities so we can ensure that it has very few overlap with public available face +recognition training set, such as MS1M and CASIA as they mostly collected from online celebrities. +As the result, we can evaluate the FAIR performance for different algorithms. + +For **ICCV2021-MFR-ALL** set, TAR is measured on all-to-all 1:1 protocal, with FAR less than 0.000001(e-6). The +globalised multi-racial testset contains 242,143 identities and 1,624,305 images. 
+ + + +| Datasets | Backbone | **MFR-ALL** | IJB-C(1E-4) | IJB-C(1E-5) | Training Throughout | log | +|:-------------------------|:-----------|:------------|:------------|:------------|:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| MS1MV3 | mobileface | 65.76 | 94.44 | 91.85 | ~13000 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/ms1mv3_mobileface_lr02/training.log)\|[config](configs/ms1mv3_mobileface_lr02.py) | +| Glint360K | mobileface | 69.83 | 95.17 | 92.58 | -11000 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/glint360k_mobileface_lr02_bs4k/training.log)\|[config](configs/glint360k_mobileface_lr02_bs4k.py) | +| WebFace42M-PartialFC-0.2 | mobileface | 73.80 | 95.40 | 92.64 | (16GPUs)~18583 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/webface42m_mobilefacenet_pfc02_bs8k_16gpus/training.log)\|[config](configs/webface42m_mobilefacenet_pfc02_bs8k_16gpus.py) | +| MS1MV3 | r100 | 83.23 | 96.88 | 95.31 | ~3400 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/ms1mv3_r100_lr02/training.log)\|[config](configs/ms1mv3_r100_lr02.py) | +| Glint360K | r100 | 90.86 | 97.53 | 96.43 | ~5000 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/glint360k_r100_lr02_bs4k_16gpus/training.log)\|[config](configs/glint360k_r100_lr02_bs4k_16gpus.py) | +| WebFace42M-PartialFC-0.2 | r50(bs4k) | 93.83 | 97.53 | 96.16 | (8 GPUs)~5900 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/webface42m_r50_bs4k_pfc02/training.log)\|[config](configs/webface42m_r50_lr01_pfc02_bs4k_8gpus.py) | +| WebFace42M-PartialFC-0.2 | r50(bs8k) | 93.96 | 97.46 | 96.12 | (16GPUs)~11000 | 
[log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/webface42m_r50_lr01_pfc02_bs8k_16gpus/training.log)\|[config](configs/webface42m_r50_lr01_pfc02_bs8k_16gpus.py) | +| WebFace42M-PartialFC-0.2 | r50(bs4k) | 94.04 | 97.48 | 95.94 | (32GPUs)~17000 | log\|[config](configs/webface42m_r50_lr01_pfc02_bs4k_32gpus.py) | +| WebFace42M-PartialFC-0.2 | r100(bs4k) | 96.69 | 97.85 | 96.63 | (16GPUs)~5200 | [log](https://raw.githubusercontent.com/anxiangsir/insightface_arcface_log/master/webface42m_r100_bs4k_pfc02/training.log)\|[config](configs/webface42m_r100_lr01_pfc02_bs4k_16gpus.py) | +| WebFace42M-PartialFC-0.2 | r200 | - | - | - | - | log\|config | + +`PartialFC-0.2` means negivate class centers sample rate is 0.2. + + +## Speed Benchmark + +`arcface_torch` can train large-scale face recognition training set efficiently and quickly. When the number of +classes in training sets is greater than 1 Million, partial fc sampling strategy will get same +accuracy with several times faster training performance and smaller GPU memory. +Partial FC is a sparse variant of the model parallel architecture for large sacle face recognition. Partial FC use a +sparse softmax, where each batch dynamicly sample a subset of class centers for training. In each iteration, only a +sparse part of the parameters will be updated, which can reduce a lot of GPU memory and calculations. With Partial FC, +we can scale trainset of 29 millions identities, the largest to date. Partial FC also supports multi-machine distributed +training and mixed precision training. + +![Image text](https://github.com/anxiangsir/insightface_arcface_log/blob/master/partial_fc_v2.png) + +More details see +[speed_benchmark.md](docs/speed_benchmark.md) in docs. + +### 1. Training speed of different parallel methods (samples / second), Tesla V100 32GB * 8. (Larger is better) + +`-` means training failed because of gpu memory limitations. 
+ +| Number of Identities in Dataset | Data Parallel | Model Parallel | Partial FC 0.1 | +|:--------------------------------|:--------------|:---------------|:---------------| +| 125000 | 4681 | 4824 | 5004 | +| 1400000 | **1672** | 3043 | 4738 | +| 5500000 | **-** | **1389** | 3975 | +| 8000000 | **-** | **-** | 3565 | +| 16000000 | **-** | **-** | 2679 | +| 29000000 | **-** | **-** | **1855** | + +### 2. GPU memory cost of different parallel methods (MB per GPU), Tesla V100 32GB * 8. (Smaller is better) + +| Number of Identities in Dataset | Data Parallel | Model Parallel | Partial FC 0.1 | +|:--------------------------------|:--------------|:---------------|:---------------| +| 125000 | 7358 | 5306 | 4868 | +| 1400000 | 32252 | 11178 | 6056 | +| 5500000 | **-** | 32188 | 9854 | +| 8000000 | **-** | **-** | 12310 | +| 16000000 | **-** | **-** | 19950 | +| 29000000 | **-** | **-** | 32324 | + + +## Citations + +``` +@inproceedings{deng2019arcface, + title={Arcface: Additive angular margin loss for deep face recognition}, + author={Deng, Jiankang and Guo, Jia and Xue, Niannan and Zafeiriou, Stefanos}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={4690--4699}, + year={2019} +} +@inproceedings{an2020partical_fc, + title={Partial FC: Training 10 Million Identities on a Single Machine}, + author={An, Xiang and Zhu, Xuhan and Xiao, Yang and Wu, Lan and Zhang, Ming and Gao, Yuan and Qin, Bin and + Zhang, Debing and Fu Ying}, + booktitle={Arxiv 2010.05222}, + year={2020} +} +``` diff --git a/arcface_torch/backbones/__init__.py b/arcface_torch/backbones/__init__.py new file mode 100644 index 0000000..55bd4c5 --- /dev/null +++ b/arcface_torch/backbones/__init__.py @@ -0,0 +1,25 @@ +from .iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200 +from .mobilefacenet import get_mbf + + +def get_model(name, **kwargs): + # resnet + if name == "r18": + return iresnet18(False, **kwargs) + elif name == "r34": + 
return iresnet34(False, **kwargs) + elif name == "r50": + return iresnet50(False, **kwargs) + elif name == "r100": + return iresnet100(False, **kwargs) + elif name == "r200": + return iresnet200(False, **kwargs) + elif name == "r2060": + from .iresnet2060 import iresnet2060 + return iresnet2060(False, **kwargs) + elif name == "mbf": + fp16 = kwargs.get("fp16", False) + num_features = kwargs.get("num_features", 512) + return get_mbf(fp16=fp16, num_features=num_features) + else: + raise ValueError() \ No newline at end of file diff --git a/arcface_torch/backbones/iresnet.py b/arcface_torch/backbones/iresnet.py new file mode 100644 index 0000000..ebd6075 --- /dev/null +++ b/arcface_torch/backbones/iresnet.py @@ -0,0 +1,186 @@ +import torch +from torch import nn + +__all__ = ['iresnet18', 'iresnet34', 'iresnet50', 'iresnet100', 'iresnet200'] + + +def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, + out_planes, + kernel_size=3, + stride=stride, + padding=dilation, + groups=groups, + bias=False, + dilation=dilation) + + +def conv1x1(in_planes, out_planes, stride=1): + """1x1 convolution""" + return nn.Conv2d(in_planes, + out_planes, + kernel_size=1, + stride=stride, + bias=False) + + +class IBasicBlock(nn.Module): + expansion = 1 + def __init__(self, inplanes, planes, stride=1, downsample=None, + groups=1, base_width=64, dilation=1): + super(IBasicBlock, self).__init__() + if groups != 1 or base_width != 64: + raise ValueError('BasicBlock only supports groups=1 and base_width=64') + if dilation > 1: + raise NotImplementedError("Dilation > 1 not supported in BasicBlock") + self.bn1 = nn.BatchNorm2d(inplanes, eps=1e-05,) + self.conv1 = conv3x3(inplanes, planes) + self.bn2 = nn.BatchNorm2d(planes, eps=1e-05,) + self.prelu = nn.PReLU(planes) + self.conv2 = conv3x3(planes, planes, stride) + self.bn3 = nn.BatchNorm2d(planes, eps=1e-05,) + self.downsample = downsample + self.stride = stride + + 
def forward(self, x): + identity = x + out = self.bn1(x) + out = self.conv1(out) + out = self.bn2(out) + out = self.prelu(out) + out = self.conv2(out) + out = self.bn3(out) + if self.downsample is not None: + identity = self.downsample(x) + out += identity + return out + + +class IResNet(nn.Module): + fc_scale = 7 * 7 + def __init__(self, + block, layers, dropout=0, num_features=512, zero_init_residual=False, + groups=1, width_per_group=64, replace_stride_with_dilation=None, fp16=False): + super(IResNet, self).__init__() + self.fp16 = fp16 + self.inplanes = 64 + self.dilation = 1 + if replace_stride_with_dilation is None: + replace_stride_with_dilation = [False, False, False] + if len(replace_stride_with_dilation) != 3: + raise ValueError("replace_stride_with_dilation should be None " + "or a 3-element tuple, got {}".format(replace_stride_with_dilation)) + self.groups = groups + self.base_width = width_per_group + self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(self.inplanes, eps=1e-05) + self.prelu = nn.PReLU(self.inplanes) + self.layer1 = self._make_layer(block, 64, layers[0], stride=2) + self.layer2 = self._make_layer(block, + 128, + layers[1], + stride=2, + dilate=replace_stride_with_dilation[0]) + self.layer3 = self._make_layer(block, + 256, + layers[2], + stride=2, + dilate=replace_stride_with_dilation[1]) + self.layer4 = self._make_layer(block, + 512, + layers[3], + stride=2, + dilate=replace_stride_with_dilation[2]) + self.bn2 = nn.BatchNorm2d(512 * block.expansion, eps=1e-05,) + self.dropout = nn.Dropout(p=dropout, inplace=True) + self.fc = nn.Linear(512 * block.expansion * self.fc_scale, num_features) + self.features = nn.BatchNorm1d(num_features, eps=1e-05) + nn.init.constant_(self.features.weight, 1.0) + self.features.weight.requires_grad = False + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.normal_(m.weight, 0, 0.1) + elif isinstance(m, (nn.BatchNorm2d, 
nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + if zero_init_residual: + for m in self.modules(): + if isinstance(m, IBasicBlock): + nn.init.constant_(m.bn2.weight, 0) + + def _make_layer(self, block, planes, blocks, stride=1, dilate=False): + downsample = None + previous_dilation = self.dilation + if dilate: + self.dilation *= stride + stride = 1 + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), + nn.BatchNorm2d(planes * block.expansion, eps=1e-05, ), + ) + layers = [] + layers.append( + block(self.inplanes, planes, stride, downsample, self.groups, + self.base_width, previous_dilation)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append( + block(self.inplanes, + planes, + groups=self.groups, + base_width=self.base_width, + dilation=self.dilation)) + + return nn.Sequential(*layers) + + def forward(self, x): + with torch.cuda.amp.autocast(self.fp16): + x = self.conv1(x) + x = self.bn1(x) + x = self.prelu(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.bn2(x) + x = torch.flatten(x, 1) + x = self.dropout(x) + x = self.fc(x.float() if self.fp16 else x) + x = self.features(x) + return x + + +def _iresnet(arch, block, layers, pretrained, progress, **kwargs): + model = IResNet(block, layers, **kwargs) + if pretrained: + raise ValueError() + return model + + +def iresnet18(pretrained=False, progress=True, **kwargs): + return _iresnet('iresnet18', IBasicBlock, [2, 2, 2, 2], pretrained, + progress, **kwargs) + + +def iresnet34(pretrained=False, progress=True, **kwargs): + return _iresnet('iresnet34', IBasicBlock, [3, 4, 6, 3], pretrained, + progress, **kwargs) + + +def iresnet50(pretrained=False, progress=True, **kwargs): + return _iresnet('iresnet50', IBasicBlock, [3, 4, 14, 3], pretrained, + progress, **kwargs) + + +def iresnet100(pretrained=False, 
progress=True, **kwargs): + return _iresnet('iresnet100', IBasicBlock, [3, 13, 30, 3], pretrained, + progress, **kwargs) + + +def iresnet200(pretrained=False, progress=True, **kwargs): + return _iresnet('iresnet200', IBasicBlock, [6, 26, 60, 6], pretrained, + progress, **kwargs) diff --git a/arcface_torch/backbones/iresnet2060.py b/arcface_torch/backbones/iresnet2060.py new file mode 100644 index 0000000..21d1122 --- /dev/null +++ b/arcface_torch/backbones/iresnet2060.py @@ -0,0 +1,176 @@ +import torch +from torch import nn + +assert torch.__version__ >= "1.8.1" +from torch.utils.checkpoint import checkpoint_sequential + +__all__ = ['iresnet2060'] + + +def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, + out_planes, + kernel_size=3, + stride=stride, + padding=dilation, + groups=groups, + bias=False, + dilation=dilation) + + +def conv1x1(in_planes, out_planes, stride=1): + """1x1 convolution""" + return nn.Conv2d(in_planes, + out_planes, + kernel_size=1, + stride=stride, + bias=False) + + +class IBasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None, + groups=1, base_width=64, dilation=1): + super(IBasicBlock, self).__init__() + if groups != 1 or base_width != 64: + raise ValueError('BasicBlock only supports groups=1 and base_width=64') + if dilation > 1: + raise NotImplementedError("Dilation > 1 not supported in BasicBlock") + self.bn1 = nn.BatchNorm2d(inplanes, eps=1e-05, ) + self.conv1 = conv3x3(inplanes, planes) + self.bn2 = nn.BatchNorm2d(planes, eps=1e-05, ) + self.prelu = nn.PReLU(planes) + self.conv2 = conv3x3(planes, planes, stride) + self.bn3 = nn.BatchNorm2d(planes, eps=1e-05, ) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + identity = x + out = self.bn1(x) + out = self.conv1(out) + out = self.bn2(out) + out = self.prelu(out) + out = self.conv2(out) + out = self.bn3(out) + if 
self.downsample is not None: + identity = self.downsample(x) + out += identity + return out + + +class IResNet(nn.Module): + fc_scale = 7 * 7 + + def __init__(self, + block, layers, dropout=0, num_features=512, zero_init_residual=False, + groups=1, width_per_group=64, replace_stride_with_dilation=None, fp16=False): + super(IResNet, self).__init__() + self.fp16 = fp16 + self.inplanes = 64 + self.dilation = 1 + if replace_stride_with_dilation is None: + replace_stride_with_dilation = [False, False, False] + if len(replace_stride_with_dilation) != 3: + raise ValueError("replace_stride_with_dilation should be None " + "or a 3-element tuple, got {}".format(replace_stride_with_dilation)) + self.groups = groups + self.base_width = width_per_group + self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(self.inplanes, eps=1e-05) + self.prelu = nn.PReLU(self.inplanes) + self.layer1 = self._make_layer(block, 64, layers[0], stride=2) + self.layer2 = self._make_layer(block, + 128, + layers[1], + stride=2, + dilate=replace_stride_with_dilation[0]) + self.layer3 = self._make_layer(block, + 256, + layers[2], + stride=2, + dilate=replace_stride_with_dilation[1]) + self.layer4 = self._make_layer(block, + 512, + layers[3], + stride=2, + dilate=replace_stride_with_dilation[2]) + self.bn2 = nn.BatchNorm2d(512 * block.expansion, eps=1e-05, ) + self.dropout = nn.Dropout(p=dropout, inplace=True) + self.fc = nn.Linear(512 * block.expansion * self.fc_scale, num_features) + self.features = nn.BatchNorm1d(num_features, eps=1e-05) + nn.init.constant_(self.features.weight, 1.0) + self.features.weight.requires_grad = False + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.normal_(m.weight, 0, 0.1) + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + if zero_init_residual: + for m in self.modules(): + if isinstance(m, IBasicBlock): + 
nn.init.constant_(m.bn2.weight, 0) + + def _make_layer(self, block, planes, blocks, stride=1, dilate=False): + downsample = None + previous_dilation = self.dilation + if dilate: + self.dilation *= stride + stride = 1 + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), + nn.BatchNorm2d(planes * block.expansion, eps=1e-05, ), + ) + layers = [] + layers.append( + block(self.inplanes, planes, stride, downsample, self.groups, + self.base_width, previous_dilation)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append( + block(self.inplanes, + planes, + groups=self.groups, + base_width=self.base_width, + dilation=self.dilation)) + + return nn.Sequential(*layers) + + def checkpoint(self, func, num_seg, x): + if self.training: + return checkpoint_sequential(func, num_seg, x) + else: + return func(x) + + def forward(self, x): + with torch.cuda.amp.autocast(self.fp16): + x = self.conv1(x) + x = self.bn1(x) + x = self.prelu(x) + x = self.layer1(x) + x = self.checkpoint(self.layer2, 20, x) + x = self.checkpoint(self.layer3, 100, x) + x = self.layer4(x) + x = self.bn2(x) + x = torch.flatten(x, 1) + x = self.dropout(x) + x = self.fc(x.float() if self.fp16 else x) + x = self.features(x) + return x + + +def _iresnet(arch, block, layers, pretrained, progress, **kwargs): + model = IResNet(block, layers, **kwargs) + if pretrained: + raise ValueError() + return model + + +def iresnet2060(pretrained=False, progress=True, **kwargs): + return _iresnet('iresnet2060', IBasicBlock, [3, 128, 1024 - 128, 3], pretrained, progress, **kwargs) diff --git a/arcface_torch/backbones/mobilefacenet.py b/arcface_torch/backbones/mobilefacenet.py new file mode 100644 index 0000000..8773149 --- /dev/null +++ b/arcface_torch/backbones/mobilefacenet.py @@ -0,0 +1,130 @@ +''' +Adapted from https://github.com/cavalleria/cavaface.pytorch/blob/master/backbone/mobilefacenet.py 
+Original author cavalleria +''' + +import torch.nn as nn +from torch.nn import Linear, Conv2d, BatchNorm1d, BatchNorm2d, PReLU, Sequential, Module +import torch + + +class Flatten(Module): + def forward(self, x): + return x.view(x.size(0), -1) + + +class ConvBlock(Module): + def __init__(self, in_c, out_c, kernel=(1, 1), stride=(1, 1), padding=(0, 0), groups=1): + super(ConvBlock, self).__init__() + self.layers = nn.Sequential( + Conv2d(in_c, out_c, kernel, groups=groups, stride=stride, padding=padding, bias=False), + BatchNorm2d(num_features=out_c), + PReLU(num_parameters=out_c) + ) + + def forward(self, x): + return self.layers(x) + + +class LinearBlock(Module): + def __init__(self, in_c, out_c, kernel=(1, 1), stride=(1, 1), padding=(0, 0), groups=1): + super(LinearBlock, self).__init__() + self.layers = nn.Sequential( + Conv2d(in_c, out_c, kernel, stride, padding, groups=groups, bias=False), + BatchNorm2d(num_features=out_c) + ) + + def forward(self, x): + return self.layers(x) + + +class DepthWise(Module): + def __init__(self, in_c, out_c, residual=False, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=1): + super(DepthWise, self).__init__() + self.residual = residual + self.layers = nn.Sequential( + ConvBlock(in_c, out_c=groups, kernel=(1, 1), padding=(0, 0), stride=(1, 1)), + ConvBlock(groups, groups, groups=groups, kernel=kernel, padding=padding, stride=stride), + LinearBlock(groups, out_c, kernel=(1, 1), padding=(0, 0), stride=(1, 1)) + ) + + def forward(self, x): + short_cut = None + if self.residual: + short_cut = x + x = self.layers(x) + if self.residual: + output = short_cut + x + else: + output = x + return output + + +class Residual(Module): + def __init__(self, c, num_block, groups, kernel=(3, 3), stride=(1, 1), padding=(1, 1)): + super(Residual, self).__init__() + modules = [] + for _ in range(num_block): + modules.append(DepthWise(c, c, True, kernel, stride, padding, groups)) + self.layers = Sequential(*modules) + + def forward(self, x): + 
return self.layers(x) + + +class GDC(Module): + def __init__(self, embedding_size): + super(GDC, self).__init__() + self.layers = nn.Sequential( + LinearBlock(512, 512, groups=512, kernel=(7, 7), stride=(1, 1), padding=(0, 0)), + Flatten(), + Linear(512, embedding_size, bias=False), + BatchNorm1d(embedding_size)) + + def forward(self, x): + return self.layers(x) + + +class MobileFaceNet(Module): + def __init__(self, fp16=False, num_features=512): + super(MobileFaceNet, self).__init__() + scale = 2 + self.fp16 = fp16 + self.layers = nn.Sequential( + ConvBlock(3, 64 * scale, kernel=(3, 3), stride=(2, 2), padding=(1, 1)), + ConvBlock(64 * scale, 64 * scale, kernel=(3, 3), stride=(1, 1), padding=(1, 1), groups=64), + DepthWise(64 * scale, 64 * scale, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=128), + Residual(64 * scale, num_block=4, groups=128, kernel=(3, 3), stride=(1, 1), padding=(1, 1)), + DepthWise(64 * scale, 128 * scale, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=256), + Residual(128 * scale, num_block=6, groups=256, kernel=(3, 3), stride=(1, 1), padding=(1, 1)), + DepthWise(128 * scale, 128 * scale, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=512), + Residual(128 * scale, num_block=2, groups=256, kernel=(3, 3), stride=(1, 1), padding=(1, 1)), + ) + self.conv_sep = ConvBlock(128 * scale, 512, kernel=(1, 1), stride=(1, 1), padding=(0, 0)) + self.features = GDC(num_features) + self._initialize_weights() + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + m.bias.data.zero_() + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + elif isinstance(m, nn.Linear): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + m.bias.data.zero_() + + def forward(self, x): + with torch.cuda.amp.autocast(self.fp16): + x = self.layers(x) + 
x = self.conv_sep(x.float() if self.fp16 else x) + x = self.features(x) + return x + + +def get_mbf(fp16, num_features): + return MobileFaceNet(fp16, num_features) \ No newline at end of file diff --git a/arcface_torch/configs/3millions.py b/arcface_torch/configs/3millions.py new file mode 100644 index 0000000..559ebe3 --- /dev/null +++ b/arcface_torch/configs/3millions.py @@ -0,0 +1,22 @@ +from easydict import EasyDict as edict + +# configs for test speed + +config = edict() +config.loss = "cosface" +config.network = "r50" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 0.99 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 64 # total_batch_size = batch_size * num_gpus +config.lr = 0.1 # batch size is 512 + +config.rec = "synthetic" +config.num_classes = 300 * 10000 +config.num_epoch = 30 +config.warmup_epoch = -1 +config.val_targets = [] diff --git a/arcface_torch/configs/__init__.py b/arcface_torch/configs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/arcface_torch/configs/base.py b/arcface_torch/configs/base.py new file mode 100644 index 0000000..5c96d42 --- /dev/null +++ b/arcface_torch/configs/base.py @@ -0,0 +1,47 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "arcface" +config.network = "r50" +config.resume = False +config.output = "ms1mv3_arcface_r50" + +config.embedding_size = 512 +config.sample_rate = 1 +config.fp16 = False +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 128 +config.lr = 0.1 # batch size is 512 +config.dali = False +config.verbose = 2000 +config.frequent = 10 +config.score = None + +# if config.dataset == "emore": +# config.rec = "/train_tmp/faces_emore" +# config.num_classes = 85742 +# config.num_image = 5822653 +# config.num_epoch = 16 +# config.warmup_epoch = -1 +# 
config.val_targets = ["lfw", ] + +# elif config.dataset == "ms1m-retinaface-t1": +# config.rec = "/train_tmp/ms1m-retinaface-t1" +# config.num_classes = 93431 +# config.num_image = 5179510 +# config.num_epoch = 25 +# config.warmup_epoch = -1 +# config.val_targets = ["lfw", "cfp_fp", "agedb_30"] + +# elif config.dataset == "glint360k": +# config.rec = "/train_tmp/glint360k" +# config.num_classes = 360232 +# config.num_image = 17091657 +# config.num_epoch = 20 +# config.warmup_epoch = -1 +# config.val_targets = ["lfw", "cfp_fp", "agedb_30"] diff --git a/arcface_torch/configs/glint360k_mobileface_lr02_bs4k.py b/arcface_torch/configs/glint360k_mobileface_lr02_bs4k.py new file mode 100644 index 0000000..485e31f --- /dev/null +++ b/arcface_torch/configs/glint360k_mobileface_lr02_bs4k.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "mbf" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 1.0 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 1e-4 +config.batch_size = 512 +config.lr = 0.4 +config.verbose = 5000 +config.dali = False + +config.rec = "/train_tmp/glint360k" +config.num_classes = 360232 +config.num_image = 17091657 +config.num_epoch = 20 +config.warmup_epoch = 2 +config.val_targets = ['lfw', 'cfp_fp', "agedb_30"] diff --git a/arcface_torch/configs/glint360k_r100_lr02_bs4k_16gpus.py b/arcface_torch/configs/glint360k_r100_lr02_bs4k_16gpus.py new file mode 100644 index 0000000..0e87c8f --- /dev/null +++ b/arcface_torch/configs/glint360k_r100_lr02_bs4k_16gpus.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "r100" +config.resume = False +config.output = None 
+config.embedding_size = 512 +config.sample_rate = 1.0 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 256 +config.lr = 0.4 +config.verbose = 5000 +config.dali = False + +config.rec = "/train_tmp/glint360k" +config.num_classes = 360232 +config.num_image = 17091657 +config.num_epoch = 20 +config.warmup_epoch = 2 +config.val_targets = ['lfw', 'cfp_fp', "agedb_30"] diff --git a/arcface_torch/configs/ms1mv3_mobileface_lr02.py b/arcface_torch/configs/ms1mv3_mobileface_lr02.py new file mode 100644 index 0000000..f5dcaa1 --- /dev/null +++ b/arcface_torch/configs/ms1mv3_mobileface_lr02.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "arcface" +config.network = "mbf" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 1.0 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 1e-4 +config.batch_size = 256 +config.lr = 0.2 +config.verbose = 5000 +config.dali = False + +config.rec = "/train_tmp/ms1m-retinaface-t1" +config.num_classes = 93431 +config.num_image = 5179510 +config.num_epoch = 40 +config.warmup_epoch = 2 +config.val_targets = ['lfw', 'cfp_fp', "agedb_30"] diff --git a/arcface_torch/configs/ms1mv3_r100_lr02.py b/arcface_torch/configs/ms1mv3_r100_lr02.py new file mode 100644 index 0000000..ec4caef --- /dev/null +++ b/arcface_torch/configs/ms1mv3_r100_lr02.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "arcface" +config.network = "r100" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 1.0 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 128 +config.lr = 0.2 +config.verbose = 2000 +config.dali = False + 
+config.rec = "/train_tmp/ms1m-retinaface-t1" +config.num_classes = 93431 +config.num_image = 5179510 +config.num_epoch = 25 +config.warmup_epoch = 0 +config.val_targets = ['lfw', 'cfp_fp', "agedb_30"] diff --git a/arcface_torch/configs/ms1mv3_r50_lr02.py b/arcface_torch/configs/ms1mv3_r50_lr02.py new file mode 100644 index 0000000..2eefde4 --- /dev/null +++ b/arcface_torch/configs/ms1mv3_r50_lr02.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "arcface" +config.network = "r50" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 1.0 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 128 +config.lr = 0.2 +config.verbose = 2000 +config.dali = False + +config.rec = "/train_tmp/ms1m-retinaface-t1" +config.num_classes = 93431 +config.num_image = 5179510 +config.num_epoch = 25 +config.warmup_epoch = 2 +config.val_targets = ['lfw', 'cfp_fp', "agedb_30"] diff --git a/arcface_torch/configs/webface42m_mobilefacenet_pfc02_bs8k_16gpus.py b/arcface_torch/configs/webface42m_mobilefacenet_pfc02_bs8k_16gpus.py new file mode 100644 index 0000000..5cd522f --- /dev/null +++ b/arcface_torch/configs/webface42m_mobilefacenet_pfc02_bs8k_16gpus.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "mbf" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 0.2 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 1e-4 +config.batch_size = 512 +config.lr = 0.4 +config.verbose = 10000 +config.dali = False + +config.rec = "/train_tmp/WebFace42M" +config.num_classes = 2059906 +config.num_image = 42474557 +config.num_epoch = 20 +config.warmup_epoch = 2 
+config.val_targets = [] diff --git a/arcface_torch/configs/webface42m_r100_lr01_pfc02_bs4k_16gpus.py b/arcface_torch/configs/webface42m_r100_lr01_pfc02_bs4k_16gpus.py new file mode 100644 index 0000000..e46f4e2 --- /dev/null +++ b/arcface_torch/configs/webface42m_r100_lr01_pfc02_bs4k_16gpus.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "r100" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 0.2 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 256 +config.lr = 0.3 +config.verbose = 2000 +config.dali = False + +config.rec = "/train_tmp/WebFace42M" +config.num_classes = 2059906 +config.num_image = 42474557 +config.num_epoch = 20 +config.warmup_epoch = 1 +config.val_targets = ["lfw", "cfp_fp", "agedb_30"] diff --git a/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_32gpus.py b/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_32gpus.py new file mode 100644 index 0000000..b5eb8bc --- /dev/null +++ b/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_32gpus.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "r50" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 0.2 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 128 +config.lr = 0.4 +config.verbose = 10000 +config.dali = False + +config.rec = "/train_tmp/WebFace42M" +config.num_classes = 2059906 +config.num_image = 42474557 +config.num_epoch = 20 +config.warmup_epoch = 2 +config.val_targets = ["lfw", "cfp_fp", "agedb_30"] diff --git a/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_8gpus.py 
b/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_8gpus.py new file mode 100644 index 0000000..6b63b7d --- /dev/null +++ b/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs4k_8gpus.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "r50" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 0.2 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 512 +config.lr = 0.4 +config.verbose = 10000 +config.dali = False + +config.rec = "/train_tmp/WebFace42M" +config.num_classes = 2059906 +config.num_image = 42474557 +config.num_epoch = 20 +config.warmup_epoch = 2 +config.val_targets = ["lfw", "cfp_fp", "agedb_30"] diff --git a/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs8k_16gpus.py b/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs8k_16gpus.py new file mode 100644 index 0000000..699d4a8 --- /dev/null +++ b/arcface_torch/configs/webface42m_r50_lr01_pfc02_bs8k_16gpus.py @@ -0,0 +1,27 @@ +from easydict import EasyDict as edict + +# make training faster +# our RAM is 256G +# mount -t tmpfs -o size=140G tmpfs /train_tmp + +config = edict() +config.loss = "cosface" +config.network = "r50" +config.resume = False +config.output = None +config.embedding_size = 512 +config.sample_rate = 0.2 +config.fp16 = True +config.momentum = 0.9 +config.weight_decay = 5e-4 +config.batch_size = 512 +config.lr = 0.6 +config.verbose = 10000 +config.dali = False + +config.rec = "/train_tmp/WebFace42M" +config.num_classes = 2059906 +config.num_image = 42474557 +config.num_epoch = 20 +config.warmup_epoch = 4 +config.val_targets = ["lfw", "cfp_fp", "agedb_30"] diff --git a/arcface_torch/dataset.py b/arcface_torch/dataset.py new file mode 100644 index 0000000..80d562e --- /dev/null +++ b/arcface_torch/dataset.py @@ -0,0 +1,209 @@ +import numbers 
+import os +import queue as Queue +import threading +from typing import Iterable + +import mxnet as mx +import numpy as np +import torch +from torch import distributed +from torch.utils.data import DataLoader, Dataset +from torchvision import transforms + +def get_dataloader( + root_dir: str, + local_rank: int, + batch_size: int, + dali = False) -> Iterable: + if dali and root_dir != "synthetic": + rec = os.path.join(root_dir, 'train.rec') + idx = os.path.join(root_dir, 'train.idx') + return dali_data_iter( + batch_size=batch_size, rec_file=rec, + idx_file=idx, num_threads=2, local_rank=local_rank) + else: + if root_dir == "synthetic": + train_set = SyntheticDataset() + else: + train_set = MXFaceDataset(root_dir=root_dir, local_rank=local_rank) + train_sampler = torch.utils.data.distributed.DistributedSampler(train_set, shuffle=True) + train_loader = DataLoaderX( + local_rank=local_rank, + dataset=train_set, + batch_size=batch_size, + sampler=train_sampler, + num_workers=2, + pin_memory=True, + drop_last=True, + ) + return train_loader + +class BackgroundGenerator(threading.Thread): + def __init__(self, generator, local_rank, max_prefetch=6): + super(BackgroundGenerator, self).__init__() + self.queue = Queue.Queue(max_prefetch) + self.generator = generator + self.local_rank = local_rank + self.daemon = True + self.start() + + def run(self): + torch.cuda.set_device(self.local_rank) + for item in self.generator: + self.queue.put(item) + self.queue.put(None) + + def next(self): + next_item = self.queue.get() + if next_item is None: + raise StopIteration + return next_item + + def __next__(self): + return self.next() + + def __iter__(self): + return self + + +class DataLoaderX(DataLoader): + + def __init__(self, local_rank, **kwargs): + super(DataLoaderX, self).__init__(**kwargs) + self.stream = torch.cuda.Stream(local_rank) + self.local_rank = local_rank + + def __iter__(self): + self.iter = super(DataLoaderX, self).__iter__() + self.iter = 
BackgroundGenerator(self.iter, self.local_rank) + self.preload() + return self + + def preload(self): + self.batch = next(self.iter, None) + if self.batch is None: + return None + with torch.cuda.stream(self.stream): + for k in range(len(self.batch)): + self.batch[k] = self.batch[k].to(device=self.local_rank, non_blocking=True) + + def __next__(self): + torch.cuda.current_stream().wait_stream(self.stream) + batch = self.batch + if batch is None: + raise StopIteration + self.preload() + return batch + + +class MXFaceDataset(Dataset): + def __init__(self, root_dir, local_rank): + super(MXFaceDataset, self).__init__() + self.transform = transforms.Compose( + [transforms.ToPILImage(), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]), + ]) + self.root_dir = root_dir + self.local_rank = local_rank + path_imgrec = os.path.join(root_dir, 'train.rec') + path_imgidx = os.path.join(root_dir, 'train.idx') + self.imgrec = mx.recordio.MXIndexedRecordIO(path_imgidx, path_imgrec, 'r') + s = self.imgrec.read_idx(0) + header, _ = mx.recordio.unpack(s) + if header.flag > 0: + self.header0 = (int(header.label[0]), int(header.label[1])) + self.imgidx = np.array(range(1, int(header.label[0]))) + else: + self.imgidx = np.array(list(self.imgrec.keys)) + + def __getitem__(self, index): + idx = self.imgidx[index] + s = self.imgrec.read_idx(idx) + header, img = mx.recordio.unpack(s) + label = header.label + if not isinstance(label, numbers.Number): + label = label[0] + label = torch.tensor(label, dtype=torch.long) + sample = mx.image.imdecode(img).asnumpy() + if self.transform is not None: + sample = self.transform(sample) + return sample, label + + def __len__(self): + return len(self.imgidx) + + +class SyntheticDataset(Dataset): + def __init__(self): + super(SyntheticDataset, self).__init__() + img = np.random.randint(0, 255, size=(112, 112, 3), dtype=np.int32) + img = np.transpose(img, (2, 0, 1)) + img = 
torch.from_numpy(img).squeeze(0).float() + img = ((img / 255) - 0.5) / 0.5 + self.img = img + self.label = 1 + + def __getitem__(self, index): + return self.img, self.label + + def __len__(self): + return 1000000 + + +def dali_data_iter( + batch_size: int, rec_file: str, idx_file: str, num_threads: int, + initial_fill=32768, random_shuffle=True, + prefetch_queue_depth=1, local_rank=0, name="reader", + mean=(127.5, 127.5, 127.5), + std=(127.5, 127.5, 127.5)): + """ + Parameters: + ---------- + initial_fill: int + Size of the buffer that is used for shuffling. If random_shuffle is False, this parameter is ignored. + + """ + rank: int = distributed.get_rank() + world_size: int = distributed.get_world_size() + import nvidia.dali.fn as fn + import nvidia.dali.types as types + from nvidia.dali.pipeline import Pipeline + from nvidia.dali.plugin.pytorch import DALIClassificationIterator + + pipe = Pipeline( + batch_size=batch_size, num_threads=num_threads, + device_id=local_rank, prefetch_queue_depth=prefetch_queue_depth, ) + condition_flip = fn.random.coin_flip(probability=0.5) + with pipe: + jpegs, labels = fn.readers.mxnet( + path=rec_file, index_path=idx_file, initial_fill=initial_fill, + num_shards=world_size, shard_id=rank, + random_shuffle=random_shuffle, pad_last_batch=False, name=name) + images = fn.decoders.image(jpegs, device="mixed", output_type=types.RGB) + images = fn.crop_mirror_normalize( + images, dtype=types.FLOAT, mean=mean, std=std, mirror=condition_flip) + pipe.set_outputs(images, labels) + pipe.build() + return DALIWarper(DALIClassificationIterator(pipelines=[pipe], reader_name=name, )) + + +@torch.no_grad() +class DALIWarper(object): + def __init__(self, dali_iter): + self.iter = dali_iter + + def __next__(self): + data_dict = self.iter.__next__()[0] + tensor_data = data_dict['data'].cuda() + tensor_label: torch.Tensor = data_dict['label'].cuda().long() + tensor_label.squeeze_() + return tensor_data, tensor_label + + def __iter__(self): + return self 
+ + def reset(self): + self.iter.reset() diff --git a/arcface_torch/docs/eval.md b/arcface_torch/docs/eval.md new file mode 100644 index 0000000..dd1d9e2 --- /dev/null +++ b/arcface_torch/docs/eval.md @@ -0,0 +1,31 @@ +## Eval on ICCV2021-MFR + +coming soon. + + +## Eval IJBC +You can eval ijbc with pytorch or onnx. + + +1. Eval IJBC With Onnx +```shell +CUDA_VISIBLE_DEVICES=0 python onnx_ijbc.py --model-root ms1mv3_arcface_r50 --image-path IJB_release/IJBC --result-dir ms1mv3_arcface_r50 +``` + +2. Eval IJBC With Pytorch +```shell +CUDA_VISIBLE_DEVICES=0,1 python eval_ijbc.py \ +--model-prefix ms1mv3_arcface_r50/backbone.pth \ +--image-path IJB_release/IJBC \ +--result-dir ms1mv3_arcface_r50 \ +--batch-size 128 \ +--job ms1mv3_arcface_r50 \ +--target IJBC \ +--network iresnet50 +``` + +## Inference + +```shell +python inference.py --weight ms1mv3_arcface_r50/backbone.pth --network r50 +``` diff --git a/arcface_torch/docs/install.md b/arcface_torch/docs/install.md new file mode 100644 index 0000000..6314a40 --- /dev/null +++ b/arcface_torch/docs/install.md @@ -0,0 +1,51 @@ +## v1.8.0 +### Linux and Windows +```shell +# CUDA 11.0 +pip --default-timeout=100 install torch==1.8.0+cu111 torchvision==0.9.0+cu111 torchaudio==0.8.0 -f https://download.pytorch.org/whl/torch_stable.html + +# CUDA 10.2 +pip --default-timeout=100 install torch==1.8.0 torchvision==0.9.0 torchaudio==0.8.0 + +# CPU only +pip --default-timeout=100 install torch==1.8.0+cpu torchvision==0.9.0+cpu torchaudio==0.8.0 -f https://download.pytorch.org/whl/torch_stable.html + +``` + + +## v1.7.1 +### Linux and Windows +```shell +# CUDA 11.0 +pip install torch==1.7.1+cu110 torchvision==0.8.2+cu110 torchaudio==0.7.2 -f https://download.pytorch.org/whl/torch_stable.html + +# CUDA 10.2 +pip install torch==1.7.1 torchvision==0.8.2 torchaudio==0.7.2 + +# CUDA 10.1 +pip install torch==1.7.1+cu101 torchvision==0.8.2+cu101 torchaudio==0.7.2 -f https://download.pytorch.org/whl/torch_stable.html + +# CUDA 9.2 +pip 
install torch==1.7.1+cu92 torchvision==0.8.2+cu92 torchaudio==0.7.2 -f https://download.pytorch.org/whl/torch_stable.html + +# CPU only +pip install torch==1.7.1+cpu torchvision==0.8.2+cpu torchaudio==0.7.2 -f https://download.pytorch.org/whl/torch_stable.html +``` + + +## v1.6.0 + +### Linux and Windows +```shell +# CUDA 10.2 +pip install torch==1.6.0 torchvision==0.7.0 + +# CUDA 10.1 +pip install torch==1.6.0+cu101 torchvision==0.7.0+cu101 -f https://download.pytorch.org/whl/torch_stable.html + +# CUDA 9.2 +pip install torch==1.6.0+cu92 torchvision==0.7.0+cu92 -f https://download.pytorch.org/whl/torch_stable.html + +# CPU only +pip install torch==1.6.0+cpu torchvision==0.7.0+cpu -f https://download.pytorch.org/whl/torch_stable.html +``` \ No newline at end of file diff --git a/arcface_torch/docs/install_dali.md b/arcface_torch/docs/install_dali.md new file mode 100644 index 0000000..1333ed7 --- /dev/null +++ b/arcface_torch/docs/install_dali.md @@ -0,0 +1 @@ +TODO diff --git a/arcface_torch/docs/modelzoo.md b/arcface_torch/docs/modelzoo.md new file mode 100644 index 0000000..e69de29 diff --git a/arcface_torch/docs/prepare_webface42m.md b/arcface_torch/docs/prepare_webface42m.md new file mode 100644 index 0000000..1675edb --- /dev/null +++ b/arcface_torch/docs/prepare_webface42m.md @@ -0,0 +1,22 @@ + + + +## 1. Download Datasets and Unzip + +Download WebFace42M from [https://www.face-benchmark.org/download.html](https://www.face-benchmark.org/download.html). + + +## 2. Create **Pre-shuffle** Rec File for DALI + +Note: preshuffled rec is very important to DALI, and rec without preshuffled can cause performance degradation, origin insightface style rec file +do not support Nvidia DALI, you must follow this command [mxnet.tools.im2rec](https://github.com/apache/incubator-mxnet/blob/master/tools/im2rec.py) to generate a pre-shuffle rec file. 
+ +```shell +# 1) create train.lst using follow command +python -m mxnet.tools.im2rec --list --recursive train "Your WebFace42M Root" + +# 2) create train.rec and train.idx using train.lst using following command +python -m mxnet.tools.im2rec --num-thread 16 --quality 100 train "Your WebFace42M Root" +``` + +Finally, you will get three files: `train.lst`, `train.rec`, `train.idx`. which `train.idx`, `train.rec` are using for training. diff --git a/arcface_torch/docs/speed_benchmark.md b/arcface_torch/docs/speed_benchmark.md new file mode 100644 index 0000000..055aee0 --- /dev/null +++ b/arcface_torch/docs/speed_benchmark.md @@ -0,0 +1,93 @@ +## Test Training Speed + +- Test Commands + +You need to use the following two commands to test the Partial FC training performance. +The number of identites is **3 millions** (synthetic data), turn mixed precision training on, backbone is resnet50, +batch size is 1024. +```shell +# Model Parallel +python -m torch.distributed.launch --nproc_per_node=8 --nnodes=1 --node_rank=0 --master_addr="127.0.0.1" --master_port=1234 train.py configs/3millions +# Partial FC 0.1 +python -m torch.distributed.launch --nproc_per_node=8 --nnodes=1 --node_rank=0 --master_addr="127.0.0.1" --master_port=1234 train.py configs/3millions_pfc +``` + +- GPU Memory + +``` +# (Model Parallel) gpustat -i +[0] Tesla V100-SXM2-32GB | 64'C, 94 % | 30338 / 32510 MB +[1] Tesla V100-SXM2-32GB | 60'C, 99 % | 28876 / 32510 MB +[2] Tesla V100-SXM2-32GB | 60'C, 99 % | 28872 / 32510 MB +[3] Tesla V100-SXM2-32GB | 69'C, 99 % | 28872 / 32510 MB +[4] Tesla V100-SXM2-32GB | 66'C, 99 % | 28888 / 32510 MB +[5] Tesla V100-SXM2-32GB | 60'C, 99 % | 28932 / 32510 MB +[6] Tesla V100-SXM2-32GB | 68'C, 100 % | 28916 / 32510 MB +[7] Tesla V100-SXM2-32GB | 65'C, 99 % | 28860 / 32510 MB + +# (Partial FC 0.1) gpustat -i +[0] Tesla V100-SXM2-32GB | 60'C, 95 % | 10488 / 32510 MB │······················· +[1] Tesla V100-SXM2-32GB | 60'C, 97 % | 10344 / 32510 MB │······················· 
+[2] Tesla V100-SXM2-32GB | 61'C, 95 % | 10340 / 32510 MB │······················· +[3] Tesla V100-SXM2-32GB | 66'C, 95 % | 10340 / 32510 MB │······················· +[4] Tesla V100-SXM2-32GB | 65'C, 94 % | 10356 / 32510 MB │······················· +[5] Tesla V100-SXM2-32GB | 61'C, 95 % | 10400 / 32510 MB │······················· +[6] Tesla V100-SXM2-32GB | 68'C, 96 % | 10384 / 32510 MB │······················· +[7] Tesla V100-SXM2-32GB | 64'C, 95 % | 10328 / 32510 MB │······················· +``` + +- Training Speed + +```python +# (Model Parallel) trainging.log +Training: Speed 2271.33 samples/sec Loss 1.1624 LearningRate 0.2000 Epoch: 0 Global Step: 100 +Training: Speed 2269.94 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 150 +Training: Speed 2272.67 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 200 +Training: Speed 2266.55 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 250 +Training: Speed 2272.54 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 300 + +# (Partial FC 0.1) trainging.log +Training: Speed 5299.56 samples/sec Loss 1.0965 LearningRate 0.2000 Epoch: 0 Global Step: 100 +Training: Speed 5296.37 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 150 +Training: Speed 5304.37 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 200 +Training: Speed 5274.43 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 250 +Training: Speed 5300.10 samples/sec Loss 0.0000 LearningRate 0.2000 Epoch: 0 Global Step: 300 +``` + +In this test case, Partial FC 0.1 only use1 1/3 of the GPU memory of the model parallel, +and the training speed is 2.5 times faster than the model parallel. + + +## Speed Benchmark + +1. Training speed of different parallel methods (samples/second), Tesla V100 32GB * 8. 
(Larger is better) + +| Number of Identities in Dataset | Data Parallel | Model Parallel | Partial FC 0.1 | +| :--- | :--- | :--- | :--- | +|125000 | 4681 | 4824 | 5004 | +|250000 | 4047 | 4521 | 4976 | +|500000 | 3087 | 4013 | 4900 | +|1000000 | 2090 | 3449 | 4803 | +|1400000 | 1672 | 3043 | 4738 | +|2000000 | - | 2593 | 4626 | +|4000000 | - | 1748 | 4208 | +|5500000 | - | 1389 | 3975 | +|8000000 | - | - | 3565 | +|16000000 | - | - | 2679 | +|29000000 | - | - | 1855 | + +2. GPU memory cost of different parallel methods (GB per GPU), Tesla V100 32GB * 8. (Smaller is better) + +| Number of Identities in Dataset | Data Parallel | Model Parallel | Partial FC 0.1 | +| :--- | :--- | :--- | :--- | +|125000 | 7358 | 5306 | 4868 | +|250000 | 9940 | 5826 | 5004 | +|500000 | 14220 | 7114 | 5202 | +|1000000 | 23708 | 9966 | 5620 | +|1400000 | 32252 | 11178 | 6056 | +|2000000 | - | 13978 | 6472 | +|4000000 | - | 23238 | 8284 | +|5500000 | - | 32188 | 9854 | +|8000000 | - | - | 12310 | +|16000000 | - | - | 19950 | +|29000000 | - | - | 32324 | diff --git a/arcface_torch/eval/__init__.py b/arcface_torch/eval/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/arcface_torch/eval/verification.py b/arcface_torch/eval/verification.py new file mode 100644 index 0000000..edacf8d --- /dev/null +++ b/arcface_torch/eval/verification.py @@ -0,0 +1,409 @@ +"""Helper for evaluation on the Labeled Faces in the Wild dataset +""" + +# MIT License +# +# Copyright (c) 2016 David Sandberg +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this 
permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + + +import datetime +import os +import pickle + +import mxnet as mx +import numpy as np +import sklearn +import torch +from mxnet import ndarray as nd +from scipy import interpolate +from sklearn.decomposition import PCA +from sklearn.model_selection import KFold + + +class LFold: + def __init__(self, n_splits=2, shuffle=False): + self.n_splits = n_splits + if self.n_splits > 1: + self.k_fold = KFold(n_splits=n_splits, shuffle=shuffle) + + def split(self, indices): + if self.n_splits > 1: + return self.k_fold.split(indices) + else: + return [(indices, indices)] + + +def calculate_roc(thresholds, + embeddings1, + embeddings2, + actual_issame, + nrof_folds=10, + pca=0): + assert (embeddings1.shape[0] == embeddings2.shape[0]) + assert (embeddings1.shape[1] == embeddings2.shape[1]) + nrof_pairs = min(len(actual_issame), embeddings1.shape[0]) + nrof_thresholds = len(thresholds) + k_fold = LFold(n_splits=nrof_folds, shuffle=False) + + tprs = np.zeros((nrof_folds, nrof_thresholds)) + fprs = np.zeros((nrof_folds, nrof_thresholds)) + accuracy = np.zeros((nrof_folds)) + indices = np.arange(nrof_pairs) + + if pca == 0: + diff = np.subtract(embeddings1, embeddings2) + dist = np.sum(np.square(diff), 1) + + for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)): + if pca > 0: + print('doing pca on', fold_idx) + embed1_train = embeddings1[train_set] + embed2_train = embeddings2[train_set] + 
_embed_train = np.concatenate((embed1_train, embed2_train), axis=0) + pca_model = PCA(n_components=pca) + pca_model.fit(_embed_train) + embed1 = pca_model.transform(embeddings1) + embed2 = pca_model.transform(embeddings2) + embed1 = sklearn.preprocessing.normalize(embed1) + embed2 = sklearn.preprocessing.normalize(embed2) + diff = np.subtract(embed1, embed2) + dist = np.sum(np.square(diff), 1) + + # Find the best threshold for the fold + acc_train = np.zeros((nrof_thresholds)) + for threshold_idx, threshold in enumerate(thresholds): + _, _, acc_train[threshold_idx] = calculate_accuracy( + threshold, dist[train_set], actual_issame[train_set]) + best_threshold_index = np.argmax(acc_train) + for threshold_idx, threshold in enumerate(thresholds): + tprs[fold_idx, threshold_idx], fprs[fold_idx, threshold_idx], _ = calculate_accuracy( + threshold, dist[test_set], + actual_issame[test_set]) + _, _, accuracy[fold_idx] = calculate_accuracy( + thresholds[best_threshold_index], dist[test_set], + actual_issame[test_set]) + + tpr = np.mean(tprs, 0) + fpr = np.mean(fprs, 0) + return tpr, fpr, accuracy + + +def calculate_accuracy(threshold, dist, actual_issame): + predict_issame = np.less(dist, threshold) + tp = np.sum(np.logical_and(predict_issame, actual_issame)) + fp = np.sum(np.logical_and(predict_issame, np.logical_not(actual_issame))) + tn = np.sum( + np.logical_and(np.logical_not(predict_issame), + np.logical_not(actual_issame))) + fn = np.sum(np.logical_and(np.logical_not(predict_issame), actual_issame)) + + tpr = 0 if (tp + fn == 0) else float(tp) / float(tp + fn) + fpr = 0 if (fp + tn == 0) else float(fp) / float(fp + tn) + acc = float(tp + tn) / dist.size + return tpr, fpr, acc + + +def calculate_val(thresholds, + embeddings1, + embeddings2, + actual_issame, + far_target, + nrof_folds=10): + assert (embeddings1.shape[0] == embeddings2.shape[0]) + assert (embeddings1.shape[1] == embeddings2.shape[1]) + nrof_pairs = min(len(actual_issame), embeddings1.shape[0]) + 
nrof_thresholds = len(thresholds) + k_fold = LFold(n_splits=nrof_folds, shuffle=False) + + val = np.zeros(nrof_folds) + far = np.zeros(nrof_folds) + + diff = np.subtract(embeddings1, embeddings2) + dist = np.sum(np.square(diff), 1) + indices = np.arange(nrof_pairs) + + for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)): + + # Find the threshold that gives FAR = far_target + far_train = np.zeros(nrof_thresholds) + for threshold_idx, threshold in enumerate(thresholds): + _, far_train[threshold_idx] = calculate_val_far( + threshold, dist[train_set], actual_issame[train_set]) + if np.max(far_train) >= far_target: + f = interpolate.interp1d(far_train, thresholds, kind='slinear') + threshold = f(far_target) + else: + threshold = 0.0 + + val[fold_idx], far[fold_idx] = calculate_val_far( + threshold, dist[test_set], actual_issame[test_set]) + + val_mean = np.mean(val) + far_mean = np.mean(far) + val_std = np.std(val) + return val_mean, val_std, far_mean + + +def calculate_val_far(threshold, dist, actual_issame): + predict_issame = np.less(dist, threshold) + true_accept = np.sum(np.logical_and(predict_issame, actual_issame)) + false_accept = np.sum( + np.logical_and(predict_issame, np.logical_not(actual_issame))) + n_same = np.sum(actual_issame) + n_diff = np.sum(np.logical_not(actual_issame)) + # print(true_accept, false_accept) + # print(n_same, n_diff) + val = float(true_accept) / float(n_same) + far = float(false_accept) / float(n_diff) + return val, far + + +def evaluate(embeddings, actual_issame, nrof_folds=10, pca=0): + # Calculate evaluation metrics + thresholds = np.arange(0, 4, 0.01) + embeddings1 = embeddings[0::2] + embeddings2 = embeddings[1::2] + tpr, fpr, accuracy = calculate_roc(thresholds, + embeddings1, + embeddings2, + np.asarray(actual_issame), + nrof_folds=nrof_folds, + pca=pca) + thresholds = np.arange(0, 4, 0.001) + val, val_std, far = calculate_val(thresholds, + embeddings1, + embeddings2, + np.asarray(actual_issame), + 1e-3, + 
nrof_folds=nrof_folds) + return tpr, fpr, accuracy, val, val_std, far + +@torch.no_grad() +def load_bin(path, image_size): + try: + with open(path, 'rb') as f: + bins, issame_list = pickle.load(f) # py2 + except UnicodeDecodeError as e: + with open(path, 'rb') as f: + bins, issame_list = pickle.load(f, encoding='bytes') # py3 + data_list = [] + for flip in [0, 1]: + data = torch.empty((len(issame_list) * 2, 3, image_size[0], image_size[1])) + data_list.append(data) + for idx in range(len(issame_list) * 2): + _bin = bins[idx] + img = mx.image.imdecode(_bin) + if img.shape[1] != image_size[0]: + img = mx.image.resize_short(img, image_size[0]) + img = nd.transpose(img, axes=(2, 0, 1)) + for flip in [0, 1]: + if flip == 1: + img = mx.ndarray.flip(data=img, axis=2) + data_list[flip][idx][:] = torch.from_numpy(img.asnumpy()) + if idx % 1000 == 0: + print('loading bin', idx) + print(data_list[0].shape) + return data_list, issame_list + +@torch.no_grad() +def test(data_set, backbone, batch_size, nfolds=10): + print('testing verification..') + data_list = data_set[0] + issame_list = data_set[1] + embeddings_list = [] + time_consumed = 0.0 + for i in range(len(data_list)): + data = data_list[i] + embeddings = None + ba = 0 + while ba < data.shape[0]: + bb = min(ba + batch_size, data.shape[0]) + count = bb - ba + _data = data[bb - batch_size: bb] + time0 = datetime.datetime.now() + img = ((_data / 255) - 0.5) / 0.5 + net_out: torch.Tensor = backbone(img) + _embeddings = net_out.detach().cpu().numpy() + time_now = datetime.datetime.now() + diff = time_now - time0 + time_consumed += diff.total_seconds() + if embeddings is None: + embeddings = np.zeros((data.shape[0], _embeddings.shape[1])) + embeddings[ba:bb, :] = _embeddings[(batch_size - count):, :] + ba = bb + embeddings_list.append(embeddings) + + _xnorm = 0.0 + _xnorm_cnt = 0 + for embed in embeddings_list: + for i in range(embed.shape[0]): + _em = embed[i] + _norm = np.linalg.norm(_em) + _xnorm += _norm + _xnorm_cnt += 1 
+ _xnorm /= _xnorm_cnt + + embeddings = embeddings_list[0].copy() + embeddings = sklearn.preprocessing.normalize(embeddings) + acc1 = 0.0 + std1 = 0.0 + embeddings = embeddings_list[0] + embeddings_list[1] + embeddings = sklearn.preprocessing.normalize(embeddings) + print(embeddings.shape) + print('infer time', time_consumed) + _, _, accuracy, val, val_std, far = evaluate(embeddings, issame_list, nrof_folds=nfolds) + acc2, std2 = np.mean(accuracy), np.std(accuracy) + return acc1, std1, acc2, std2, _xnorm, embeddings_list + + +def dumpR(data_set, + backbone, + batch_size, + name='', + data_extra=None, + label_shape=None): + print('dump verification embedding..') + data_list = data_set[0] + issame_list = data_set[1] + embeddings_list = [] + time_consumed = 0.0 + for i in range(len(data_list)): + data = data_list[i] + embeddings = None + ba = 0 + while ba < data.shape[0]: + bb = min(ba + batch_size, data.shape[0]) + count = bb - ba + + _data = nd.slice_axis(data, axis=0, begin=bb - batch_size, end=bb) + time0 = datetime.datetime.now() + if data_extra is None: + db = mx.io.DataBatch(data=(_data,), label=(_label,)) + else: + db = mx.io.DataBatch(data=(_data, _data_extra), + label=(_label,)) + model.forward(db, is_train=False) + net_out = model.get_outputs() + _embeddings = net_out[0].asnumpy() + time_now = datetime.datetime.now() + diff = time_now - time0 + time_consumed += diff.total_seconds() + if embeddings is None: + embeddings = np.zeros((data.shape[0], _embeddings.shape[1])) + embeddings[ba:bb, :] = _embeddings[(batch_size - count):, :] + ba = bb + embeddings_list.append(embeddings) + embeddings = embeddings_list[0] + embeddings_list[1] + embeddings = sklearn.preprocessing.normalize(embeddings) + actual_issame = np.asarray(issame_list) + outname = os.path.join('temp.bin') + with open(outname, 'wb') as f: + pickle.dump((embeddings, issame_list), + f, + protocol=pickle.HIGHEST_PROTOCOL) + + +# if __name__ == '__main__': +# +# parser = 
argparse.ArgumentParser(description='do verification') +# # general +# parser.add_argument('--data-dir', default='', help='') +# parser.add_argument('--model', +# default='../model/softmax,50', +# help='path to load model.') +# parser.add_argument('--target', +# default='lfw,cfp_ff,cfp_fp,agedb_30', +# help='test targets.') +# parser.add_argument('--gpu', default=0, type=int, help='gpu id') +# parser.add_argument('--batch-size', default=32, type=int, help='') +# parser.add_argument('--max', default='', type=str, help='') +# parser.add_argument('--mode', default=0, type=int, help='') +# parser.add_argument('--nfolds', default=10, type=int, help='') +# args = parser.parse_args() +# image_size = [112, 112] +# print('image_size', image_size) +# ctx = mx.gpu(args.gpu) +# nets = [] +# vec = args.model.split(',') +# prefix = args.model.split(',')[0] +# epochs = [] +# if len(vec) == 1: +# pdir = os.path.dirname(prefix) +# for fname in os.listdir(pdir): +# if not fname.endswith('.params'): +# continue +# _file = os.path.join(pdir, fname) +# if _file.startswith(prefix): +# epoch = int(fname.split('.')[0].split('-')[1]) +# epochs.append(epoch) +# epochs = sorted(epochs, reverse=True) +# if len(args.max) > 0: +# _max = [int(x) for x in args.max.split(',')] +# assert len(_max) == 2 +# if len(epochs) > _max[1]: +# epochs = epochs[_max[0]:_max[1]] +# +# else: +# epochs = [int(x) for x in vec[1].split('|')] +# print('model number', len(epochs)) +# time0 = datetime.datetime.now() +# for epoch in epochs: +# print('loading', prefix, epoch) +# sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch) +# # arg_params, aux_params = ch_dev(arg_params, aux_params, ctx) +# all_layers = sym.get_internals() +# sym = all_layers['fc1_output'] +# model = mx.mod.Module(symbol=sym, context=ctx, label_names=None) +# # model.bind(data_shapes=[('data', (args.batch_size, 3, image_size[0], image_size[1]))], label_shapes=[('softmax_label', (args.batch_size,))]) +# 
model.bind(data_shapes=[('data', (args.batch_size, 3, image_size[0], +# image_size[1]))]) +# model.set_params(arg_params, aux_params) +# nets.append(model) +# time_now = datetime.datetime.now() +# diff = time_now - time0 +# print('model loading time', diff.total_seconds()) +# +# ver_list = [] +# ver_name_list = [] +# for name in args.target.split(','): +# path = os.path.join(args.data_dir, name + ".bin") +# if os.path.exists(path): +# print('loading.. ', name) +# data_set = load_bin(path, image_size) +# ver_list.append(data_set) +# ver_name_list.append(name) +# +# if args.mode == 0: +# for i in range(len(ver_list)): +# results = [] +# for model in nets: +# acc1, std1, acc2, std2, xnorm, embeddings_list = test( +# ver_list[i], model, args.batch_size, args.nfolds) +# print('[%s]XNorm: %f' % (ver_name_list[i], xnorm)) +# print('[%s]Accuracy: %1.5f+-%1.5f' % (ver_name_list[i], acc1, std1)) +# print('[%s]Accuracy-Flip: %1.5f+-%1.5f' % (ver_name_list[i], acc2, std2)) +# results.append(acc2) +# print('Max of [%s] is %1.5f' % (ver_name_list[i], np.max(results))) +# elif args.mode == 1: +# raise ValueError +# else: +# model = nets[0] +# dumpR(ver_list[0], model, args.batch_size, args.target) diff --git a/arcface_torch/eval_ijbc.py b/arcface_torch/eval_ijbc.py new file mode 100644 index 0000000..9c5a650 --- /dev/null +++ b/arcface_torch/eval_ijbc.py @@ -0,0 +1,483 @@ +# coding: utf-8 + +import os +import pickle + +import matplotlib +import pandas as pd + +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import timeit +import sklearn +import argparse +import cv2 +import numpy as np +import torch +from skimage import transform as trans +from backbones import get_model +from sklearn.metrics import roc_curve, auc + +from menpo.visualize.viewmatplotlib import sample_colours_from_colourmap +from prettytable import PrettyTable +from pathlib import Path + +import sys +import warnings + +sys.path.insert(0, "../") +warnings.filterwarnings("ignore") + +parser = 
argparse.ArgumentParser(description='do ijb test') +# general +parser.add_argument('--model-prefix', default='', help='path to load model.') +parser.add_argument('--image-path', default='', type=str, help='') +parser.add_argument('--result-dir', default='.', type=str, help='') +parser.add_argument('--batch-size', default=128, type=int, help='') +parser.add_argument('--network', default='iresnet50', type=str, help='') +parser.add_argument('--job', default='insightface', type=str, help='job name') +parser.add_argument('--target', default='IJBC', type=str, help='target, set to IJBC or IJBB') +args = parser.parse_args() + +target = args.target +model_path = args.model_prefix +image_path = args.image_path +result_dir = args.result_dir +gpu_id = None +use_norm_score = True # if Ture, TestMode(N1) +use_detector_score = True # if Ture, TestMode(D1) +use_flip_test = True # if Ture, TestMode(F1) +job = args.job +batch_size = args.batch_size + + +class Embedding(object): + def __init__(self, prefix, data_shape, batch_size=1): + image_size = (112, 112) + self.image_size = image_size + weight = torch.load(prefix) + resnet = get_model(args.network, dropout=0, fp16=False).cuda() + resnet.load_state_dict(weight) + model = torch.nn.DataParallel(resnet) + self.model = model + self.model.eval() + src = np.array([ + [30.2946, 51.6963], + [65.5318, 51.5014], + [48.0252, 71.7366], + [33.5493, 92.3655], + [62.7299, 92.2041]], dtype=np.float32) + src[:, 0] += 8.0 + self.src = src + self.batch_size = batch_size + self.data_shape = data_shape + + def get(self, rimg, landmark): + + assert landmark.shape[0] == 68 or landmark.shape[0] == 5 + assert landmark.shape[1] == 2 + if landmark.shape[0] == 68: + landmark5 = np.zeros((5, 2), dtype=np.float32) + landmark5[0] = (landmark[36] + landmark[39]) / 2 + landmark5[1] = (landmark[42] + landmark[45]) / 2 + landmark5[2] = landmark[30] + landmark5[3] = landmark[48] + landmark5[4] = landmark[54] + else: + landmark5 = landmark + tform = 
trans.SimilarityTransform() + tform.estimate(landmark5, self.src) + M = tform.params[0:2, :] + img = cv2.warpAffine(rimg, + M, (self.image_size[1], self.image_size[0]), + borderValue=0.0) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + img_flip = np.fliplr(img) + img = np.transpose(img, (2, 0, 1)) # 3*112*112, RGB + img_flip = np.transpose(img_flip, (2, 0, 1)) + input_blob = np.zeros((2, 3, self.image_size[1], self.image_size[0]), dtype=np.uint8) + input_blob[0] = img + input_blob[1] = img_flip + return input_blob + + @torch.no_grad() + def forward_db(self, batch_data): + imgs = torch.Tensor(batch_data).cuda() + imgs.div_(255).sub_(0.5).div_(0.5) + feat = self.model(imgs) + feat = feat.reshape([self.batch_size, 2 * feat.shape[1]]) + return feat.cpu().numpy() + + +# 将一个list尽量均分成n份,限制len(list)==n,份数大于原list内元素个数则分配空list[] +def divideIntoNstrand(listTemp, n): + twoList = [[] for i in range(n)] + for i, e in enumerate(listTemp): + twoList[i % n].append(e) + return twoList + + +def read_template_media_list(path): + # ijb_meta = np.loadtxt(path, dtype=str) + ijb_meta = pd.read_csv(path, sep=' ', header=None).values + templates = ijb_meta[:, 1].astype(np.int) + medias = ijb_meta[:, 2].astype(np.int) + return templates, medias + + +# In[ ]: + + +def read_template_pair_list(path): + # pairs = np.loadtxt(path, dtype=str) + pairs = pd.read_csv(path, sep=' ', header=None).values + # print(pairs.shape) + # print(pairs[:, 0].astype(np.int)) + t1 = pairs[:, 0].astype(np.int) + t2 = pairs[:, 1].astype(np.int) + label = pairs[:, 2].astype(np.int) + return t1, t2, label + + +# In[ ]: + + +def read_image_feature(path): + with open(path, 'rb') as fid: + img_feats = pickle.load(fid) + return img_feats + + +# In[ ]: + + +def get_image_feature(img_path, files_list, model_path, epoch, gpu_id): + batch_size = args.batch_size + data_shape = (3, 112, 112) + + files = files_list + print('files:', len(files)) + rare_size = len(files) % batch_size + faceness_scores = [] + batch = 0 + img_feats = 
np.empty((len(files), 1024), dtype=np.float32) + + batch_data = np.empty((2 * batch_size, 3, 112, 112)) + embedding = Embedding(model_path, data_shape, batch_size) + for img_index, each_line in enumerate(files[:len(files) - rare_size]): + name_lmk_score = each_line.strip().split(' ') + img_name = os.path.join(img_path, name_lmk_score[0]) + img = cv2.imread(img_name) + lmk = np.array([float(x) for x in name_lmk_score[1:-1]], + dtype=np.float32) + lmk = lmk.reshape((5, 2)) + input_blob = embedding.get(img, lmk) + + batch_data[2 * (img_index - batch * batch_size)][:] = input_blob[0] + batch_data[2 * (img_index - batch * batch_size) + 1][:] = input_blob[1] + if (img_index + 1) % batch_size == 0: + print('batch', batch) + img_feats[batch * batch_size:batch * batch_size + + batch_size][:] = embedding.forward_db(batch_data) + batch += 1 + faceness_scores.append(name_lmk_score[-1]) + + batch_data = np.empty((2 * rare_size, 3, 112, 112)) + embedding = Embedding(model_path, data_shape, rare_size) + for img_index, each_line in enumerate(files[len(files) - rare_size:]): + name_lmk_score = each_line.strip().split(' ') + img_name = os.path.join(img_path, name_lmk_score[0]) + img = cv2.imread(img_name) + lmk = np.array([float(x) for x in name_lmk_score[1:-1]], + dtype=np.float32) + lmk = lmk.reshape((5, 2)) + input_blob = embedding.get(img, lmk) + batch_data[2 * img_index][:] = input_blob[0] + batch_data[2 * img_index + 1][:] = input_blob[1] + if (img_index + 1) % rare_size == 0: + print('batch', batch) + img_feats[len(files) - + rare_size:][:] = embedding.forward_db(batch_data) + batch += 1 + faceness_scores.append(name_lmk_score[-1]) + faceness_scores = np.array(faceness_scores).astype(np.float32) + # img_feats = np.ones( (len(files), 1024), dtype=np.float32) * 0.01 + # faceness_scores = np.ones( (len(files), ), dtype=np.float32 ) + return img_feats, faceness_scores + + +# In[ ]: + + +def image2template_feature(img_feats=None, templates=None, medias=None): + # 
========================================================== + # 1. face image feature l2 normalization. img_feats:[number_image x feats_dim] + # 2. compute media feature. + # 3. compute template feature. + # ========================================================== + unique_templates = np.unique(templates) + template_feats = np.zeros((len(unique_templates), img_feats.shape[1])) + + for count_template, uqt in enumerate(unique_templates): + + (ind_t,) = np.where(templates == uqt) + face_norm_feats = img_feats[ind_t] + face_medias = medias[ind_t] + unique_medias, unique_media_counts = np.unique(face_medias, + return_counts=True) + media_norm_feats = [] + for u, ct in zip(unique_medias, unique_media_counts): + (ind_m,) = np.where(face_medias == u) + if ct == 1: + media_norm_feats += [face_norm_feats[ind_m]] + else: # image features from the same video will be aggregated into one feature + media_norm_feats += [ + np.mean(face_norm_feats[ind_m], axis=0, keepdims=True) + ] + media_norm_feats = np.array(media_norm_feats) + # media_norm_feats = media_norm_feats / np.sqrt(np.sum(media_norm_feats ** 2, -1, keepdims=True)) + template_feats[count_template] = np.sum(media_norm_feats, axis=0) + if count_template % 2000 == 0: + print('Finish Calculating {} template features.'.format( + count_template)) + # template_norm_feats = template_feats / np.sqrt(np.sum(template_feats ** 2, -1, keepdims=True)) + template_norm_feats = sklearn.preprocessing.normalize(template_feats) + # print(template_norm_feats.shape) + return template_norm_feats, unique_templates + + +# In[ ]: + + +def verification(template_norm_feats=None, + unique_templates=None, + p1=None, + p2=None): + # ========================================================== + # Compute set-to-set Similarity Score. 
+ # ========================================================== + template2id = np.zeros((max(unique_templates) + 1, 1), dtype=int) + for count_template, uqt in enumerate(unique_templates): + template2id[uqt] = count_template + + score = np.zeros((len(p1),)) # save cosine distance between pairs + + total_pairs = np.array(range(len(p1))) + batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limiation + sublists = [ + total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize) + ] + total_sublists = len(sublists) + for c, s in enumerate(sublists): + feat1 = template_norm_feats[template2id[p1[s]]] + feat2 = template_norm_feats[template2id[p2[s]]] + similarity_score = np.sum(feat1 * feat2, -1) + score[s] = similarity_score.flatten() + if c % 10 == 0: + print('Finish {}/{} pairs.'.format(c, total_sublists)) + return score + + +# In[ ]: +def verification2(template_norm_feats=None, + unique_templates=None, + p1=None, + p2=None): + template2id = np.zeros((max(unique_templates) + 1, 1), dtype=int) + for count_template, uqt in enumerate(unique_templates): + template2id[uqt] = count_template + score = np.zeros((len(p1),)) # save cosine distance between pairs + total_pairs = np.array(range(len(p1))) + batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limiation + sublists = [ + total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize) + ] + total_sublists = len(sublists) + for c, s in enumerate(sublists): + feat1 = template_norm_feats[template2id[p1[s]]] + feat2 = template_norm_feats[template2id[p2[s]]] + similarity_score = np.sum(feat1 * feat2, -1) + score[s] = similarity_score.flatten() + if c % 10 == 0: + print('Finish {}/{} pairs.'.format(c, total_sublists)) + return score + + +def read_score(path): + with open(path, 'rb') as fid: + img_feats = pickle.load(fid) + return img_feats + + +# # Step1: Load Meta Data + +# In[ ]: + +assert target == 'IJBC' or target == 'IJBB' + +# 
============================================================= +# load image and template relationships for template feature embedding +# tid --> template id, mid --> media id +# format: +# image_name tid mid +# ============================================================= +start = timeit.default_timer() +templates, medias = read_template_media_list( + os.path.join('%s/meta' % image_path, + '%s_face_tid_mid.txt' % target.lower())) +stop = timeit.default_timer() +print('Time: %.2f s. ' % (stop - start)) + +# In[ ]: + +# ============================================================= +# load template pairs for template-to-template verification +# tid : template id, label : 1/0 +# format: +# tid_1 tid_2 label +# ============================================================= +start = timeit.default_timer() +p1, p2, label = read_template_pair_list( + os.path.join('%s/meta' % image_path, + '%s_template_pair_label.txt' % target.lower())) +stop = timeit.default_timer() +print('Time: %.2f s. ' % (stop - start)) + +# # Step 2: Get Image Features + +# In[ ]: + +# ============================================================= +# load image features +# format: +# img_feats: [image_num x feats_dim] (227630, 512) +# ============================================================= +start = timeit.default_timer() +img_path = '%s/loose_crop' % image_path +img_list_path = '%s/meta/%s_name_5pts_score.txt' % (image_path, target.lower()) +img_list = open(img_list_path) +files = img_list.readlines() +# files_list = divideIntoNstrand(files, rank_size) +files_list = files + +# img_feats +# for i in range(rank_size): +img_feats, faceness_scores = get_image_feature(img_path, files_list, + model_path, 0, gpu_id) +stop = timeit.default_timer() +print('Time: %.2f s. 
' % (stop - start)) +print('Feature Shape: ({} , {}) .'.format(img_feats.shape[0], + img_feats.shape[1])) + +# # Step3: Get Template Features + +# In[ ]: + +# ============================================================= +# compute template features from image features. +# ============================================================= +start = timeit.default_timer() +# ========================================================== +# Norm feature before aggregation into template feature? +# Feature norm from embedding network and faceness score are able to decrease weights for noise samples (not face). +# ========================================================== +# 1. FaceScore (Feature Norm) +# 2. FaceScore (Detector) + +if use_flip_test: + # concat --- F1 + # img_input_feats = img_feats + # add --- F2 + img_input_feats = img_feats[:, 0:img_feats.shape[1] // + 2] + img_feats[:, img_feats.shape[1] // 2:] +else: + img_input_feats = img_feats[:, 0:img_feats.shape[1] // 2] + +if use_norm_score: + img_input_feats = img_input_feats +else: + # normalise features to remove norm information + img_input_feats = img_input_feats / np.sqrt( + np.sum(img_input_feats ** 2, -1, keepdims=True)) + +if use_detector_score: + print(img_input_feats.shape, faceness_scores.shape) + img_input_feats = img_input_feats * faceness_scores[:, np.newaxis] +else: + img_input_feats = img_input_feats + +template_norm_feats, unique_templates = image2template_feature( + img_input_feats, templates, medias) +stop = timeit.default_timer() +print('Time: %.2f s. ' % (stop - start)) + +# # Step 4: Get Template Similarity Scores + +# In[ ]: + +# ============================================================= +# compute verification scores between template pairs. +# ============================================================= +start = timeit.default_timer() +score = verification(template_norm_feats, unique_templates, p1, p2) +stop = timeit.default_timer() +print('Time: %.2f s. 
' % (stop - start)) + +# In[ ]: +save_path = os.path.join(result_dir, args.job) +# save_path = result_dir + '/%s_result' % target + +if not os.path.exists(save_path): + os.makedirs(save_path) + +score_save_file = os.path.join(save_path, "%s.npy" % target.lower()) +np.save(score_save_file, score) + +# # Step 5: Get ROC Curves and TPR@FPR Table + +# In[ ]: + +files = [score_save_file] +methods = [] +scores = [] +for file in files: + methods.append(Path(file).stem) + scores.append(np.load(file)) + +methods = np.array(methods) +scores = dict(zip(methods, scores)) +colours = dict( + zip(methods, sample_colours_from_colourmap(methods.shape[0], 'Set2'))) +x_labels = [10 ** -6, 10 ** -5, 10 ** -4, 10 ** -3, 10 ** -2, 10 ** -1] +tpr_fpr_table = PrettyTable(['Methods'] + [str(x) for x in x_labels]) +fig = plt.figure() +for method in methods: + fpr, tpr, _ = roc_curve(label, scores[method]) + roc_auc = auc(fpr, tpr) + fpr = np.flipud(fpr) + tpr = np.flipud(tpr) # select largest tpr at same fpr + plt.plot(fpr, + tpr, + color=colours[method], + lw=1, + label=('[%s (AUC = %0.4f %%)]' % + (method.split('-')[-1], roc_auc * 100))) + tpr_fpr_row = [] + tpr_fpr_row.append("%s-%s" % (method, target)) + for fpr_iter in np.arange(len(x_labels)): + _, min_index = min( + list(zip(abs(fpr - x_labels[fpr_iter]), range(len(fpr))))) + tpr_fpr_row.append('%.2f' % (tpr[min_index] * 100)) + tpr_fpr_table.add_row(tpr_fpr_row) +plt.xlim([10 ** -6, 0.1]) +plt.ylim([0.3, 1.0]) +plt.grid(linestyle='--', linewidth=1) +plt.xticks(x_labels) +plt.yticks(np.linspace(0.3, 1.0, 8, endpoint=True)) +plt.xscale('log') +plt.xlabel('False Positive Rate') +plt.ylabel('True Positive Rate') +plt.title('ROC on IJB') +plt.legend(loc="lower right") +fig.savefig(os.path.join(save_path, '%s.pdf' % target.lower())) +print(tpr_fpr_table) diff --git a/arcface_torch/inference.py b/arcface_torch/inference.py new file mode 100644 index 0000000..3e5156e --- /dev/null +++ b/arcface_torch/inference.py @@ -0,0 +1,35 @@ +import 
argparse

import cv2
import numpy as np
import torch

from backbones import get_model


@torch.no_grad()
def inference(weight, name, img):
    """Embed a single face image with a trained backbone and print the result.

    weight: path to a state-dict checkpoint; name: backbone identifier for
    get_model; img: image path, or None to embed a random 112x112 image
    (smoke-test mode).
    """
    if img is None:
        img = np.random.randint(0, 255, size=(112, 112, 3), dtype=np.uint8)
    else:
        img = cv2.imread(img)
        img = cv2.resize(img, (112, 112))

    # BGR -> RGB, HWC -> CHW, then normalize pixels to [-1, 1].
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    img = np.transpose(img, (2, 0, 1))
    img = torch.from_numpy(img).unsqueeze(0).float()
    img.div_(255).sub_(0.5).div_(0.5)
    net = get_model(name, fp16=False)
    net.load_state_dict(torch.load(weight))
    net.eval()
    feat = net(img).numpy()
    print(feat)


if __name__ == "__main__":
    # NOTE(review): the description says "Training" but this script only runs
    # inference -- likely copy-pasted; confirm before relying on --help text.
    parser = argparse.ArgumentParser(description='PyTorch ArcFace Training')
    parser.add_argument('--network', type=str, default='r50', help='backbone network')
    parser.add_argument('--weight', type=str, default='')
    parser.add_argument('--img', type=str, default=None)
    args = parser.parse_args()
    inference(args.weight, args.network, args.img)
diff --git a/arcface_torch/losses.py b/arcface_torch/losses.py
new file mode 100644
index 0000000..697b4d8
--- /dev/null
+++ b/arcface_torch/losses.py
@@ -0,0 +1,47 @@
import torch
import math

class ArcFace(torch.nn.Module):
    """ ArcFace (https://arxiv.org/pdf/1801.07698v1.pdf):

    Additive angular margin head: for each row whose label is not -1, the
    target-class cosine logit cos(theta) is replaced by cos(theta + margin),
    then every logit is multiplied by the scale s.
    """
    def __init__(self, s=64.0, margin=0.5):
        super(ArcFace, self).__init__()
        self.scale = s
        # Precomputed constants for the angle-addition formula.
        self.cos_m = math.cos(margin)
        self.sin_m = math.sin(margin)
        # Threshold cos(pi - margin): beyond it theta + margin would exceed
        # pi, so a linear penalty (sinmm) is used instead (hard-margin path).
        self.theta = math.cos(math.pi - margin)
        self.sinmm = math.sin(math.pi - margin) * margin
        self.easy_margin = False


    def forward(self, logits: torch.Tensor, labels: torch.Tensor):
        # Rows labelled -1 receive no margin (their logits are only scaled).
        index = torch.where(labels != -1)[0]
        target_logit = logits[index, labels[index].view(-1)]

        sin_theta = torch.sqrt(1.0 - torch.pow(target_logit, 2))
        cos_theta_m = target_logit * self.cos_m - sin_theta * self.sin_m  # cos(target+margin)
        if self.easy_margin:
            final_target_logit = torch.where(
                target_logit > 0, cos_theta_m, target_logit)
        else:
            final_target_logit = torch.where(
                target_logit > self.theta, cos_theta_m, target_logit - self.sinmm)

        # Write the margin-adjusted logits back for the target classes only,
        # then apply the feature scale to every logit.
        logits[index, labels[index].view(-1)] = final_target_logit
        logits = logits * self.scale
        return logits


class CosFace(torch.nn.Module):
    """CosFace / additive cosine margin head.

    Subtracts a fixed margin m from each row's target-class cosine logit,
    then scales all logits by s. Rows whose label is -1 get no margin (their
    logits are only scaled).
    """
    def __init__(self, s=64.0, m=0.40):
        super(CosFace, self).__init__()
        self.s = s  # logit scale
        self.m = m  # additive cosine margin

    def forward(self, logits: torch.Tensor, labels: torch.Tensor):
        index = torch.where(labels != -1)[0]
        target_logit = logits[index, labels[index].view(-1)]
        final_target_logit = target_logit - self.m
        logits[index, labels[index].view(-1)] = final_target_logit
        logits = logits * self.s
        return logits
diff --git a/arcface_torch/lr_scheduler.py b/arcface_torch/lr_scheduler.py
new file mode 100644
index 0000000..4248964
--- /dev/null
+++ b/arcface_torch/lr_scheduler.py
@@ -0,0 +1,29 @@
from torch.optim.lr_scheduler import _LRScheduler


class PolyScheduler(_LRScheduler):
    """Polynomial (power-2) learning-rate decay with linear warmup.

    The LR ramps linearly up to base_lr over warmup_steps, then decays as
    base_lr * (1 - t / T)^2, reaching 0 at max_steps. `last_epoch` counts
    scheduler steps, not epochs.
    """
    def __init__(self, optimizer, base_lr, max_steps, warmup_steps, last_epoch=-1):
        self.base_lr = base_lr
        self.warmup_lr_init = 0.0001  # LR reported before the first step
        self.max_steps: int = max_steps
        self.warmup_steps: int = warmup_steps
        self.power = 2
        super(PolyScheduler, self).__init__(optimizer, last_epoch, False)

    def get_warmup_lr(self):
        # Linear ramp: fraction of warmup completed times the target LR.
        # NOTE(review): assumes warmup_steps > 0 -- zero divides by zero here.
        alpha = float(self.last_epoch) / float(self.warmup_steps)
        return [self.base_lr * alpha for _ in self.optimizer.param_groups]

    def get_lr(self):
        if self.last_epoch == -1:
            # Initial query before any optimizer step.
            return [self.warmup_lr_init for _ in self.optimizer.param_groups]
        if self.last_epoch < self.warmup_steps:
            return self.get_warmup_lr()
        else:
            # Quadratic decay over the post-warmup span.
            alpha = pow(
                1
                - float(self.last_epoch - self.warmup_steps)
                / float(self.max_steps - self.warmup_steps),
                self.power,
            )
            return [self.base_lr * alpha for _ in self.optimizer.param_groups]
diff --git a/arcface_torch/onnx_helper.py b/arcface_torch/onnx_helper.py
new file mode 100644
index 0000000..ca922ca
--- /dev/null
+++ b/arcface_torch/onnx_helper.py
@@ -0,0 +1,250 @@
from __future__ import 
division +import datetime +import os +import os.path as osp +import glob +import numpy as np +import cv2 +import sys +import onnxruntime +import onnx +import argparse +from onnx import numpy_helper +from insightface.data import get_image + +class ArcFaceORT: + def __init__(self, model_path, cpu=False): + self.model_path = model_path + # providers = None will use available provider, for onnxruntime-gpu it will be "CUDAExecutionProvider" + self.providers = ['CPUExecutionProvider'] if cpu else None + + #input_size is (w,h), return error message, return None if success + def check(self, track='cfat', test_img = None): + #default is cfat + max_model_size_mb=1024 + max_feat_dim=512 + max_time_cost=15 + if track.startswith('ms1m'): + max_model_size_mb=1024 + max_feat_dim=512 + max_time_cost=10 + elif track.startswith('glint'): + max_model_size_mb=1024 + max_feat_dim=1024 + max_time_cost=20 + elif track.startswith('cfat'): + max_model_size_mb = 1024 + max_feat_dim = 512 + max_time_cost = 15 + elif track.startswith('unconstrained'): + max_model_size_mb=1024 + max_feat_dim=1024 + max_time_cost=30 + else: + return "track not found" + + if not os.path.exists(self.model_path): + return "model_path not exists" + if not os.path.isdir(self.model_path): + return "model_path should be directory" + onnx_files = [] + for _file in os.listdir(self.model_path): + if _file.endswith('.onnx'): + onnx_files.append(osp.join(self.model_path, _file)) + if len(onnx_files)==0: + return "do not have onnx files" + self.model_file = sorted(onnx_files)[-1] + print('use onnx-model:', self.model_file) + try: + session = onnxruntime.InferenceSession(self.model_file, providers=self.providers) + except: + return "load onnx failed" + input_cfg = session.get_inputs()[0] + input_shape = input_cfg.shape + print('input-shape:', input_shape) + if len(input_shape)!=4: + return "length of input_shape should be 4" + if not isinstance(input_shape[0], str): + #return "input_shape[0] should be str to support 
batch-inference" + print('reset input-shape[0] to None') + model = onnx.load(self.model_file) + model.graph.input[0].type.tensor_type.shape.dim[0].dim_param = 'None' + new_model_file = osp.join(self.model_path, 'zzzzrefined.onnx') + onnx.save(model, new_model_file) + self.model_file = new_model_file + print('use new onnx-model:', self.model_file) + try: + session = onnxruntime.InferenceSession(self.model_file, providers=self.providers) + except: + return "load onnx failed" + input_cfg = session.get_inputs()[0] + input_shape = input_cfg.shape + print('new-input-shape:', input_shape) + + self.image_size = tuple(input_shape[2:4][::-1]) + #print('image_size:', self.image_size) + input_name = input_cfg.name + outputs = session.get_outputs() + output_names = [] + for o in outputs: + output_names.append(o.name) + #print(o.name, o.shape) + if len(output_names)!=1: + return "number of output nodes should be 1" + self.session = session + self.input_name = input_name + self.output_names = output_names + #print(self.output_names) + model = onnx.load(self.model_file) + graph = model.graph + if len(graph.node)<8: + return "too small onnx graph" + + input_size = (112,112) + self.crop = None + if track=='cfat': + crop_file = osp.join(self.model_path, 'crop.txt') + if osp.exists(crop_file): + lines = open(crop_file,'r').readlines() + if len(lines)!=6: + return "crop.txt should contain 6 lines" + lines = [int(x) for x in lines] + self.crop = lines[:4] + input_size = tuple(lines[4:6]) + if input_size!=self.image_size: + return "input-size is inconsistant with onnx model input, %s vs %s"%(input_size, self.image_size) + + self.model_size_mb = os.path.getsize(self.model_file) / float(1024*1024) + if self.model_size_mb > max_model_size_mb: + return "max model size exceed, given %.3f-MB"%self.model_size_mb + + input_mean = None + input_std = None + if track=='cfat': + pn_file = osp.join(self.model_path, 'pixel_norm.txt') + if osp.exists(pn_file): + lines = open(pn_file,'r').readlines() + 
if len(lines)!=2: + return "pixel_norm.txt should contain 2 lines" + input_mean = float(lines[0]) + input_std = float(lines[1]) + if input_mean is not None or input_std is not None: + if input_mean is None or input_std is None: + return "please set input_mean and input_std simultaneously" + else: + find_sub = False + find_mul = False + for nid, node in enumerate(graph.node[:8]): + print(nid, node.name) + if node.name.startswith('Sub') or node.name.startswith('_minus'): + find_sub = True + if node.name.startswith('Mul') or node.name.startswith('_mul') or node.name.startswith('Div'): + find_mul = True + if find_sub and find_mul: + print("find sub and mul") + #mxnet arcface model + input_mean = 0.0 + input_std = 1.0 + else: + input_mean = 127.5 + input_std = 127.5 + self.input_mean = input_mean + self.input_std = input_std + for initn in graph.initializer: + weight_array = numpy_helper.to_array(initn) + dt = weight_array.dtype + if dt.itemsize<4: + return 'invalid weight type - (%s:%s)' % (initn.name, dt.name) + if test_img is None: + test_img = get_image('Tom_Hanks_54745') + test_img = cv2.resize(test_img, self.image_size) + else: + test_img = cv2.resize(test_img, self.image_size) + feat, cost = self.benchmark(test_img) + batch_result = self.check_batch(test_img) + batch_result_sum = float(np.sum(batch_result)) + if batch_result_sum in [float('inf'), -float('inf')] or batch_result_sum != batch_result_sum: + print(batch_result) + print(batch_result_sum) + return "batch result output contains NaN!" 
+ + if len(feat.shape) < 2: + return "the shape of the feature must be two, but get {}".format(str(feat.shape)) + + if feat.shape[1] > max_feat_dim: + return "max feat dim exceed, given %d"%feat.shape[1] + self.feat_dim = feat.shape[1] + cost_ms = cost*1000 + if cost_ms>max_time_cost: + return "max time cost exceed, given %.4f"%cost_ms + self.cost_ms = cost_ms + print('check stat:, model-size-mb: %.4f, feat-dim: %d, time-cost-ms: %.4f, input-mean: %.3f, input-std: %.3f'%(self.model_size_mb, self.feat_dim, self.cost_ms, self.input_mean, self.input_std)) + return None + + def check_batch(self, img): + if not isinstance(img, list): + imgs = [img, ] * 32 + if self.crop is not None: + nimgs = [] + for img in imgs: + nimg = img[self.crop[1]:self.crop[3], self.crop[0]:self.crop[2], :] + if nimg.shape[0] != self.image_size[1] or nimg.shape[1] != self.image_size[0]: + nimg = cv2.resize(nimg, self.image_size) + nimgs.append(nimg) + imgs = nimgs + blob = cv2.dnn.blobFromImages( + images=imgs, scalefactor=1.0 / self.input_std, size=self.image_size, + mean=(self.input_mean, self.input_mean, self.input_mean), swapRB=True) + net_out = self.session.run(self.output_names, {self.input_name: blob})[0] + return net_out + + + def meta_info(self): + return {'model-size-mb':self.model_size_mb, 'feature-dim':self.feat_dim, 'infer': self.cost_ms} + + + def forward(self, imgs): + if not isinstance(imgs, list): + imgs = [imgs] + input_size = self.image_size + if self.crop is not None: + nimgs = [] + for img in imgs: + nimg = img[self.crop[1]:self.crop[3],self.crop[0]:self.crop[2],:] + if nimg.shape[0]!=input_size[1] or nimg.shape[1]!=input_size[0]: + nimg = cv2.resize(nimg, input_size) + nimgs.append(nimg) + imgs = nimgs + blob = cv2.dnn.blobFromImages(imgs, 1.0/self.input_std, input_size, (self.input_mean, self.input_mean, self.input_mean), swapRB=True) + net_out = self.session.run(self.output_names, {self.input_name : blob})[0] + return net_out + + def benchmark(self, img): + input_size = 
self.image_size + if self.crop is not None: + nimg = img[self.crop[1]:self.crop[3],self.crop[0]:self.crop[2],:] + if nimg.shape[0]!=input_size[1] or nimg.shape[1]!=input_size[0]: + nimg = cv2.resize(nimg, input_size) + img = nimg + blob = cv2.dnn.blobFromImage(img, 1.0/self.input_std, input_size, (self.input_mean, self.input_mean, self.input_mean), swapRB=True) + costs = [] + for _ in range(50): + ta = datetime.datetime.now() + net_out = self.session.run(self.output_names, {self.input_name : blob})[0] + tb = datetime.datetime.now() + cost = (tb-ta).total_seconds() + costs.append(cost) + costs = sorted(costs) + cost = costs[5] + return net_out, cost + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='') + # general + parser.add_argument('workdir', help='submitted work dir', type=str) + parser.add_argument('--track', help='track name, for different challenge', type=str, default='cfat') + args = parser.parse_args() + handler = ArcFaceORT(args.workdir) + err = handler.check(args.track) + print('err:', err) diff --git a/arcface_torch/onnx_ijbc.py b/arcface_torch/onnx_ijbc.py new file mode 100644 index 0000000..31c491b --- /dev/null +++ b/arcface_torch/onnx_ijbc.py @@ -0,0 +1,269 @@ +import argparse +import os +import pickle +import timeit + +import cv2 +import mxnet as mx +import numpy as np +import pandas as pd +import prettytable +import skimage.transform +import torch +from sklearn.metrics import roc_curve +from sklearn.preprocessing import normalize +from torch.utils.data import DataLoader +from onnx_helper import ArcFaceORT + +SRC = np.array( + [ + [30.2946, 51.6963], + [65.5318, 51.5014], + [48.0252, 71.7366], + [33.5493, 92.3655], + [62.7299, 92.2041]] + , dtype=np.float32) +SRC[:, 0] += 8.0 + + +@torch.no_grad() +class AlignedDataSet(mx.gluon.data.Dataset): + def __init__(self, root, lines, align=True): + self.lines = lines + self.root = root + self.align = align + + def __len__(self): + return len(self.lines) + + def 
__getitem__(self, idx): + each_line = self.lines[idx] + name_lmk_score = each_line.strip().split(' ') + name = os.path.join(self.root, name_lmk_score[0]) + img = cv2.cvtColor(cv2.imread(name), cv2.COLOR_BGR2RGB) + landmark5 = np.array([float(x) for x in name_lmk_score[1:-1]], dtype=np.float32).reshape((5, 2)) + st = skimage.transform.SimilarityTransform() + st.estimate(landmark5, SRC) + img = cv2.warpAffine(img, st.params[0:2, :], (112, 112), borderValue=0.0) + img_1 = np.expand_dims(img, 0) + img_2 = np.expand_dims(np.fliplr(img), 0) + output = np.concatenate((img_1, img_2), axis=0).astype(np.float32) + output = np.transpose(output, (0, 3, 1, 2)) + return torch.from_numpy(output) + + +@torch.no_grad() +def extract(model_root, dataset): + model = ArcFaceORT(model_path=model_root) + model.check() + feat_mat = np.zeros(shape=(len(dataset), 2 * model.feat_dim)) + + def collate_fn(data): + return torch.cat(data, dim=0) + + data_loader = DataLoader( + dataset, batch_size=128, drop_last=False, num_workers=4, collate_fn=collate_fn, ) + num_iter = 0 + for batch in data_loader: + batch = batch.numpy() + batch = (batch - model.input_mean) / model.input_std + feat = model.session.run(model.output_names, {model.input_name: batch})[0] + feat = np.reshape(feat, (-1, model.feat_dim * 2)) + feat_mat[128 * num_iter: 128 * num_iter + feat.shape[0], :] = feat + num_iter += 1 + if num_iter % 50 == 0: + print(num_iter) + return feat_mat + + +def read_template_media_list(path): + ijb_meta = pd.read_csv(path, sep=' ', header=None).values + templates = ijb_meta[:, 1].astype(np.int) + medias = ijb_meta[:, 2].astype(np.int) + return templates, medias + + +def read_template_pair_list(path): + pairs = pd.read_csv(path, sep=' ', header=None).values + t1 = pairs[:, 0].astype(np.int) + t2 = pairs[:, 1].astype(np.int) + label = pairs[:, 2].astype(np.int) + return t1, t2, label + + +def read_image_feature(path): + with open(path, 'rb') as fid: + img_feats = pickle.load(fid) + return img_feats + + 
+def image2template_feature(img_feats=None, + templates=None, + medias=None): + unique_templates = np.unique(templates) + template_feats = np.zeros((len(unique_templates), img_feats.shape[1])) + for count_template, uqt in enumerate(unique_templates): + (ind_t,) = np.where(templates == uqt) + face_norm_feats = img_feats[ind_t] + face_medias = medias[ind_t] + unique_medias, unique_media_counts = np.unique(face_medias, return_counts=True) + media_norm_feats = [] + for u, ct in zip(unique_medias, unique_media_counts): + (ind_m,) = np.where(face_medias == u) + if ct == 1: + media_norm_feats += [face_norm_feats[ind_m]] + else: # image features from the same video will be aggregated into one feature + media_norm_feats += [np.mean(face_norm_feats[ind_m], axis=0, keepdims=True), ] + media_norm_feats = np.array(media_norm_feats) + template_feats[count_template] = np.sum(media_norm_feats, axis=0) + if count_template % 2000 == 0: + print('Finish Calculating {} template features.'.format( + count_template)) + template_norm_feats = normalize(template_feats) + return template_norm_feats, unique_templates + + +def verification(template_norm_feats=None, + unique_templates=None, + p1=None, + p2=None): + template2id = np.zeros((max(unique_templates) + 1, 1), dtype=int) + for count_template, uqt in enumerate(unique_templates): + template2id[uqt] = count_template + score = np.zeros((len(p1),)) + total_pairs = np.array(range(len(p1))) + batchsize = 100000 + sublists = [total_pairs[i: i + batchsize] for i in range(0, len(p1), batchsize)] + total_sublists = len(sublists) + for c, s in enumerate(sublists): + feat1 = template_norm_feats[template2id[p1[s]]] + feat2 = template_norm_feats[template2id[p2[s]]] + similarity_score = np.sum(feat1 * feat2, -1) + score[s] = similarity_score.flatten() + if c % 10 == 0: + print('Finish {}/{} pairs.'.format(c, total_sublists)) + return score + + +def verification2(template_norm_feats=None, + unique_templates=None, + p1=None, + p2=None): + template2id = 
np.zeros((max(unique_templates) + 1, 1), dtype=int) + for count_template, uqt in enumerate(unique_templates): + template2id[uqt] = count_template + score = np.zeros((len(p1),)) # save cosine distance between pairs + total_pairs = np.array(range(len(p1))) + batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limiation + sublists = [total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize)] + total_sublists = len(sublists) + for c, s in enumerate(sublists): + feat1 = template_norm_feats[template2id[p1[s]]] + feat2 = template_norm_feats[template2id[p2[s]]] + similarity_score = np.sum(feat1 * feat2, -1) + score[s] = similarity_score.flatten() + if c % 10 == 0: + print('Finish {}/{} pairs.'.format(c, total_sublists)) + return score + + +def main(args): + use_norm_score = True # if Ture, TestMode(N1) + use_detector_score = True # if Ture, TestMode(D1) + use_flip_test = True # if Ture, TestMode(F1) + assert args.target == 'IJBC' or args.target == 'IJBB' + + start = timeit.default_timer() + templates, medias = read_template_media_list( + os.path.join('%s/meta' % args.image_path, '%s_face_tid_mid.txt' % args.target.lower())) + stop = timeit.default_timer() + print('Time: %.2f s. ' % (stop - start)) + + start = timeit.default_timer() + p1, p2, label = read_template_pair_list( + os.path.join('%s/meta' % args.image_path, + '%s_template_pair_label.txt' % args.target.lower())) + stop = timeit.default_timer() + print('Time: %.2f s. 
' % (stop - start)) + + start = timeit.default_timer() + img_path = '%s/loose_crop' % args.image_path + img_list_path = '%s/meta/%s_name_5pts_score.txt' % (args.image_path, args.target.lower()) + img_list = open(img_list_path) + files = img_list.readlines() + dataset = AlignedDataSet(root=img_path, lines=files, align=True) + img_feats = extract(args.model_root, dataset) + + faceness_scores = [] + for each_line in files: + name_lmk_score = each_line.split() + faceness_scores.append(name_lmk_score[-1]) + faceness_scores = np.array(faceness_scores).astype(np.float32) + stop = timeit.default_timer() + print('Time: %.2f s. ' % (stop - start)) + print('Feature Shape: ({} , {}) .'.format(img_feats.shape[0], img_feats.shape[1])) + start = timeit.default_timer() + + if use_flip_test: + img_input_feats = img_feats[:, 0:img_feats.shape[1] // 2] + img_feats[:, img_feats.shape[1] // 2:] + else: + img_input_feats = img_feats[:, 0:img_feats.shape[1] // 2] + + if use_norm_score: + img_input_feats = img_input_feats + else: + img_input_feats = img_input_feats / np.sqrt(np.sum(img_input_feats ** 2, -1, keepdims=True)) + + if use_detector_score: + print(img_input_feats.shape, faceness_scores.shape) + img_input_feats = img_input_feats * faceness_scores[:, np.newaxis] + else: + img_input_feats = img_input_feats + + template_norm_feats, unique_templates = image2template_feature( + img_input_feats, templates, medias) + stop = timeit.default_timer() + print('Time: %.2f s. ' % (stop - start)) + + start = timeit.default_timer() + score = verification(template_norm_feats, unique_templates, p1, p2) + stop = timeit.default_timer() + print('Time: %.2f s. 
' % (stop - start)) + result_dir = args.model_root + + save_path = os.path.join(result_dir, "{}_result".format(args.target)) + if not os.path.exists(save_path): + os.makedirs(save_path) + score_save_file = os.path.join(save_path, "{}.npy".format(args.target)) + np.save(score_save_file, score) + files = [score_save_file] + methods = [] + scores = [] + for file in files: + methods.append(os.path.basename(file)) + scores.append(np.load(file)) + methods = np.array(methods) + scores = dict(zip(methods, scores)) + x_labels = [10 ** -6, 10 ** -5, 10 ** -4, 10 ** -3, 10 ** -2, 10 ** -1] + tpr_fpr_table = prettytable.PrettyTable(['Methods'] + [str(x) for x in x_labels]) + for method in methods: + fpr, tpr, _ = roc_curve(label, scores[method]) + fpr = np.flipud(fpr) + tpr = np.flipud(tpr) + tpr_fpr_row = [] + tpr_fpr_row.append("%s-%s" % (method, args.target)) + for fpr_iter in np.arange(len(x_labels)): + _, min_index = min( + list(zip(abs(fpr - x_labels[fpr_iter]), range(len(fpr))))) + tpr_fpr_row.append('%.2f' % (tpr[min_index] * 100)) + tpr_fpr_table.add_row(tpr_fpr_row) + print(tpr_fpr_table) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='do ijb test') + # general + parser.add_argument('--model-root', default='', help='path to load model.') + parser.add_argument('--image-path', default='/train_tmp/IJB_release/IJBC', type=str, help='') + parser.add_argument('--target', default='IJBC', type=str, help='target, set to IJBC or IJBB') + main(parser.parse_args()) diff --git a/arcface_torch/partial_fc.py b/arcface_torch/partial_fc.py new file mode 100644 index 0000000..d4f7e68 --- /dev/null +++ b/arcface_torch/partial_fc.py @@ -0,0 +1,330 @@ +import collections +from typing import Callable + +import torch +from torch import distributed +from torch.nn.functional import linear, normalize + + +class PartialFC(torch.nn.Module): + """ + https://arxiv.org/abs/2010.05222 + A distributed sparsely updating variant of the FC layer, named Partial FC (PFC). 
+ + When sample rate less than 1, in each iteration, positive class centers and a random subset of + negative class centers are selected to compute the margin-based softmax loss, all class + centers are still maintained throughout the whole training process, but only a subset is + selected and updated in each iteration. + + .. note:: + When sample rate equal to 1, Partial FC is equal to model parallelism(default sample rate is 1). + + Example: + -------- + >>> module_pfc = PartialFC(embedding_size=512, num_classes=8000000, sample_rate=0.2) + >>> for img, labels in data_loader: + >>> embeddings = net(img) + >>> loss = module_pfc(embeddings, labels, optimizer) + >>> loss.backward() + >>> optimizer.step() + """ + _version = 1 + def __init__( + self, + margin_loss: Callable, + embedding_size: int, + num_classes: int, + sample_rate: float = 1.0, + fp16: bool = False, + ): + """ + Paramenters: + ----------- + embedding_size: int + The dimension of embedding, required + num_classes: int + Total number of classes, required + sample_rate: float + The rate of negative centers participating in the calculation, default is 1.0. 
+ """ + super(PartialFC, self).__init__() + assert ( + distributed.is_initialized() + ), "must initialize distributed before create this" + self.rank = distributed.get_rank() + self.world_size = distributed.get_world_size() + + self.dist_cross_entropy = DistCrossEntropy() + self.embedding_size = embedding_size + self.sample_rate: float = sample_rate + self.fp16 = fp16 + self.num_local: int = num_classes // self.world_size + int( + self.rank < num_classes % self.world_size + ) + self.class_start: int = num_classes // self.world_size * self.rank + min( + self.rank, num_classes % self.world_size + ) + self.num_sample: int = int(self.sample_rate * self.num_local) + self.last_batch_size: int = 0 + self.weight: torch.Tensor + self.weight_mom: torch.Tensor + self.weight_activated: torch.nn.Parameter + self.weight_activated_mom: torch.Tensor + self.is_updated: bool = True + self.init_weight_update: bool = True + + if self.sample_rate < 1: + self.register_buffer("weight", + tensor=torch.normal(0, 0.01, (self.num_local, embedding_size))) + self.register_buffer("weight_mom", + tensor=torch.zeros_like(self.weight)) + self.register_parameter("weight_activated", + param=torch.nn.Parameter(torch.empty(0, 0))) + self.register_buffer("weight_activated_mom", + tensor=torch.empty(0, 0)) + self.register_buffer("weight_index", + tensor=torch.empty(0, 0)) + else: + self.weight_activated = torch.nn.Parameter(torch.normal(0, 0.01, (self.num_local, embedding_size))) + + # margin_loss + if isinstance(margin_loss, Callable): + self.margin_softmax = margin_loss + else: + raise + + @torch.no_grad() + def sample(self, + labels: torch.Tensor, + index_positive: torch.Tensor, + optimizer: torch.optim.Optimizer): + """ + This functions will change the value of labels + + Parameters: + ----------- + labels: torch.Tensor + pass + index_positive: torch.Tensor + pass + optimizer: torch.optim.Optimizer + pass + """ + positive = torch.unique(labels[index_positive], sorted=True).cuda() + if 
self.num_sample - positive.size(0) >= 0: + perm = torch.rand(size=[self.num_local]).cuda() + perm[positive] = 2.0 + index = torch.topk(perm, k=self.num_sample)[1].cuda() + index = index.sort()[0].cuda() + else: + index = positive + self.weight_index = index + + labels[index_positive] = torch.searchsorted(index, labels[index_positive]) + + self.weight_activated = torch.nn.Parameter(self.weight[self.weight_index]) + self.weight_activated_mom = self.weight_mom[self.weight_index] + + if isinstance(optimizer, torch.optim.SGD): + # TODO the params of partial fc must be last in the params list + optimizer.state.pop(optimizer.param_groups[-1]["params"][0], None) + optimizer.param_groups[-1]["params"][0] = self.weight_activated + optimizer.state[self.weight_activated][ + "momentum_buffer" + ] = self.weight_activated_mom + else: + raise + + @torch.no_grad() + def update(self): + """ partial weight to global + """ + if self.init_weight_update: + self.init_weight_update = False + return + + if self.sample_rate < 1: + self.weight[self.weight_index] = self.weight_activated + self.weight_mom[self.weight_index] = self.weight_activated_mom + + + def forward( + self, + local_embeddings: torch.Tensor, + local_labels: torch.Tensor, + optimizer: torch.optim.Optimizer, + ): + """ + Parameters: + ---------- + local_embeddings: torch.Tensor + feature embeddings on each GPU(Rank). + local_labels: torch.Tensor + labels on each GPU(Rank). 
+ + Returns: + ------- + loss: torch.Tensor + pass + """ + local_labels.squeeze_() + local_labels = local_labels.long() + self.update() + + batch_size = local_embeddings.size(0) + if self.last_batch_size == 0: + self.last_batch_size = batch_size + assert self.last_batch_size == batch_size, ( + "last batch size do not equal current batch size: {} vs {}".format( + self.last_batch_size, batch_size)) + + _gather_embeddings = [ + torch.zeros((batch_size, self.embedding_size)).cuda() + for _ in range(self.world_size) + ] + _gather_labels = [ + torch.zeros(batch_size).long().cuda() for _ in range(self.world_size) + ] + _list_embeddings = AllGather(local_embeddings, *_gather_embeddings) + distributed.all_gather(_gather_labels, local_labels) + + embeddings = torch.cat(_list_embeddings) + labels = torch.cat(_gather_labels) + + labels = labels.view(-1, 1) + index_positive = (self.class_start <= labels) & ( + labels < self.class_start + self.num_local + ) + labels[~index_positive] = -1 + labels[index_positive] -= self.class_start + + if self.sample_rate < 1: + self.sample(labels, index_positive, optimizer) + + with torch.cuda.amp.autocast(self.fp16): + norm_embeddings = normalize(embeddings) + norm_weight_activated = normalize(self.weight_activated) + logits = linear(norm_embeddings, norm_weight_activated) + if self.fp16: + logits = logits.float() + logits = logits.clamp(-1, 1) + + logits = self.margin_softmax(logits, labels) + loss = self.dist_cross_entropy(logits, labels) + return loss + + def state_dict(self, destination=None, prefix="", keep_vars=False): + if destination is None: + destination = collections.OrderedDict() + destination._metadata = collections.OrderedDict() + + for name, module in self._modules.items(): + if module is not None: + module.state_dict(destination, prefix + name + ".", keep_vars=keep_vars) + if self.sample_rate < 1: + destination["weight"] = self.weight.detach() + else: + destination["weight"] = self.weight_activated.data.detach() + return 
destination + + def load_state_dict(self, state_dict, strict: bool = True): + if self.sample_rate < 1: + self.weight = state_dict["weight"].to(self.weight.device) + self.weight_mom.zero_() + self.weight_activated.data.zero_() + self.weight_activated_mom.zero_() + self.weight_index.zero_() + else: + self.weight_activated.data = state_dict["weight"].to(self.weight_activated.data.device) + +class DistCrossEntropyFunc(torch.autograd.Function): + """ + CrossEntropy loss is calculated in parallel, allreduce denominator into single gpu and calculate softmax. + Implemented of ArcFace (https://arxiv.org/pdf/1801.07698v1.pdf): + """ + + @staticmethod + def forward(ctx, logits: torch.Tensor, label: torch.Tensor): + """ """ + batch_size = logits.size(0) + # for numerical stability + max_logits, _ = torch.max(logits, dim=1, keepdim=True) + # local to global + distributed.all_reduce(max_logits, distributed.ReduceOp.MAX) + logits.sub_(max_logits) + logits.exp_() + sum_logits_exp = torch.sum(logits, dim=1, keepdim=True) + # local to global + distributed.all_reduce(sum_logits_exp, distributed.ReduceOp.SUM) + logits.div_(sum_logits_exp) + index = torch.where(label != -1)[0] + # loss + loss = torch.zeros(batch_size, 1, device=logits.device) + loss[index] = logits[index].gather(1, label[index]) + distributed.all_reduce(loss, distributed.ReduceOp.SUM) + ctx.save_for_backward(index, logits, label) + return loss.clamp_min_(1e-30).log_().mean() * (-1) + + @staticmethod + def backward(ctx, loss_gradient): + """ + Args: + loss_grad (torch.Tensor): gradient backward by last layer + Returns: + gradients for each input in forward function + `None` gradients for one-hot label + """ + ( + index, + logits, + label, + ) = ctx.saved_tensors + batch_size = logits.size(0) + one_hot = torch.zeros( + size=[index.size(0), logits.size(1)], device=logits.device + ) + one_hot.scatter_(1, label[index], 1) + logits[index] -= one_hot + logits.div_(batch_size) + return logits * loss_gradient.item(), None + + 
+class DistCrossEntropy(torch.nn.Module): + def __init__(self): + super(DistCrossEntropy, self).__init__() + + def forward(self, logit_part, label_part): + return DistCrossEntropyFunc.apply(logit_part, label_part) + + +class AllGatherFunc(torch.autograd.Function): + """AllGather op with gradient backward""" + + @staticmethod + def forward(ctx, tensor, *gather_list): + gather_list = list(gather_list) + distributed.all_gather(gather_list, tensor) + return tuple(gather_list) + + @staticmethod + def backward(ctx, *grads): + grad_list = list(grads) + rank = distributed.get_rank() + grad_out = grad_list[rank] + + dist_ops = [ + distributed.reduce(grad_out, rank, distributed.ReduceOp.SUM, async_op=True) + if i == rank + else distributed.reduce( + grad_list[i], i, distributed.ReduceOp.SUM, async_op=True + ) + for i in range(distributed.get_world_size()) + ] + for _op in dist_ops: + _op.wait() + + grad_out *= len(grad_list) # cooperate with distributed loss function + return (grad_out, *[None for _ in range(len(grad_list))]) + + +AllGather = AllGatherFunc.apply diff --git a/arcface_torch/requirement.txt b/arcface_torch/requirement.txt new file mode 100644 index 0000000..f72c1b3 --- /dev/null +++ b/arcface_torch/requirement.txt @@ -0,0 +1,5 @@ +tensorboard +easydict +mxnet +onnx +sklearn diff --git a/arcface_torch/run.sh b/arcface_torch/run.sh new file mode 100644 index 0000000..4069075 --- /dev/null +++ b/arcface_torch/run.sh @@ -0,0 +1,9 @@ + +CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 python -m torch.distributed.launch \ +--nproc_per_node=8 \ +--nnodes=1 \ +--node_rank=0 \ +--master_addr="127.0.0.1" \ +--master_port=12345 train.py $@ + +ps -ef | grep "train" | grep -v grep | awk '{print "kill -9 "$2}' | sh diff --git a/arcface_torch/torch2onnx.py b/arcface_torch/torch2onnx.py new file mode 100644 index 0000000..63ce2c5 --- /dev/null +++ b/arcface_torch/torch2onnx.py @@ -0,0 +1,53 @@ +import numpy as np +import onnx +import torch + + +def convert_onnx(net, path_module, output, 
opset=11, simplify=False): + assert isinstance(net, torch.nn.Module) + img = np.random.randint(0, 255, size=(112, 112, 3), dtype=np.int32) + img = img.astype(np.float) + img = (img / 255. - 0.5) / 0.5 # torch style norm + img = img.transpose((2, 0, 1)) + img = torch.from_numpy(img).unsqueeze(0).float() + + weight = torch.load(path_module) + net.load_state_dict(weight, strict=True) + net.eval() + torch.onnx.export(net, img, output, keep_initializers_as_inputs=False, verbose=False, opset_version=opset) + model = onnx.load(output) + graph = model.graph + graph.input[0].type.tensor_type.shape.dim[0].dim_param = 'None' + if simplify: + from onnxsim import simplify + model, check = simplify(model) + assert check, "Simplified ONNX model could not be validated" + onnx.save(model, output) + + +if __name__ == '__main__': + import os + import argparse + from backbones import get_model + + parser = argparse.ArgumentParser(description='ArcFace PyTorch to onnx') + parser.add_argument('input', type=str, help='input backbone.pth file or path') + parser.add_argument('--output', type=str, default=None, help='output onnx path') + parser.add_argument('--network', type=str, default=None, help='backbone network') + parser.add_argument('--simplify', type=bool, default=False, help='onnx simplify') + args = parser.parse_args() + input_file = args.input + if os.path.isdir(input_file): + input_file = os.path.join(input_file, "model.pt") + assert os.path.exists(input_file) + # model_name = os.path.basename(os.path.dirname(input_file)).lower() + # params = model_name.split("_") + # if len(params) >= 3 and params[1] in ('arcface', 'cosface'): + # if args.network is None: + # args.network = params[2] + assert args.network is not None + print(args) + backbone_onnx = get_model(args.network, dropout=0) + if args.output is None: + args.output = os.path.join(os.path.dirname(args.input), "model.onnx") + convert_onnx(backbone_onnx, input_file, args.output, simplify=args.simplify) diff --git 
a/arcface_torch/train.py b/arcface_torch/train.py new file mode 100644 index 0000000..9e27e8c --- /dev/null +++ b/arcface_torch/train.py @@ -0,0 +1,161 @@ +import argparse +import logging +import os + +import torch +from torch import distributed +from torch.utils.tensorboard import SummaryWriter + +from backbones import get_model +from dataset import get_dataloader +from torch.utils.data import DataLoader +from lr_scheduler import PolyScheduler +from losses import CosFace, ArcFace +from partial_fc import PartialFC +from utils.utils_callbacks import CallBackLogging, CallBackVerification +from utils.utils_config import get_config +from utils.utils_logging import AverageMeter, init_logging + + +try: + world_size = int(os.environ["WORLD_SIZE"]) + rank = int(os.environ["RANK"]) + distributed.init_process_group("nccl") +except KeyError: + world_size = 1 + rank = 0 + distributed.init_process_group( + backend="nccl", + init_method="tcp://127.0.0.1:12584", + rank=rank, + world_size=world_size, + ) + + +def main(args): + torch.cuda.set_device(args.local_rank) + cfg = get_config(args.config) + + os.makedirs(cfg.output, exist_ok=True) + init_logging(rank, cfg.output) + summary_writer = ( + SummaryWriter(log_dir=os.path.join(cfg.output, "tensorboard")) + if rank == 0 + else None + ) + train_loader = get_dataloader( + cfg.rec, local_rank=args.local_rank, batch_size=cfg.batch_size, dali=cfg.dali) + backbone = get_model( + cfg.network, dropout=0.0, fp16=cfg.fp16, num_features=cfg.embedding_size + ).cuda() + + backbone = torch.nn.parallel.DistributedDataParallel( + module=backbone, broadcast_buffers=False, device_ids=[args.local_rank]) + backbone.train() + + if cfg.loss == "arcface": + margin_loss = ArcFace() + elif cfg.loss == "cosface": + margin_loss = CosFace() + else: + raise + + module_partial_fc = PartialFC( + margin_loss, + cfg.embedding_size, + cfg.num_classes, + cfg.sample_rate, + cfg.fp16 + ) + module_partial_fc.train().cuda() + + # TODO the params of partial fc must be 
last in the params list + opt = torch.optim.SGD( + params=[ + {"params": backbone.parameters(), }, + {"params": module_partial_fc.parameters(), }, + ], + lr=cfg.lr, + momentum=0.9, + weight_decay=cfg.weight_decay + ) + total_batch_size = cfg.batch_size * world_size + cfg.warmup_step = cfg.num_image // total_batch_size * cfg.warmup_epoch + cfg.total_step = cfg.num_image // total_batch_size * cfg.num_epoch + lr_scheduler = PolyScheduler( + optimizer=opt, + base_lr=cfg.lr, + max_steps=cfg.total_step, + warmup_steps=cfg.warmup_step + ) + + for key, value in cfg.items(): + num_space = 25 - len(key) + logging.info(": " + key + " " * num_space + str(value)) + + callback_verification = CallBackVerification( + val_targets=cfg.val_targets, rec_prefix=cfg.rec, summary_writer=summary_writer + ) + callback_logging = CallBackLogging( + frequent=cfg.frequent, + total_step=cfg.total_step, + batch_size=cfg.batch_size, + writer=summary_writer + ) + + loss_am = AverageMeter() + start_epoch = 0 + global_step = 0 + amp = torch.cuda.amp.grad_scaler.GradScaler(growth_interval=100) + + for epoch in range(start_epoch, cfg.num_epoch): + + if isinstance(train_loader, DataLoader): + train_loader.sampler.set_epoch(epoch) + for _, (img, local_labels) in enumerate(train_loader): + global_step += 1 + local_embeddings = backbone(img) + loss: torch.Tensor = module_partial_fc(local_embeddings, local_labels, opt) + + if cfg.fp16: + amp.scale(loss).backward() + amp.unscale_(opt) + torch.nn.utils.clip_grad_norm_(backbone.parameters(), 5) + amp.step(opt) + amp.update() + else: + loss.backward() + torch.nn.utils.clip_grad_norm_(backbone.parameters(), 5) + opt.step() + + opt.zero_grad() + lr_scheduler.step() + + with torch.no_grad(): + loss_am.update(loss.item(), 1) + callback_logging(global_step, loss_am, epoch, cfg.fp16, lr_scheduler.get_last_lr()[0], amp) + + if global_step % cfg.verbose == 0 and global_step > 200: + callback_verification(global_step, backbone) + + path_pfc = os.path.join(cfg.output, 
"softmax_fc_gpu_{}.pt".format(rank)) + torch.save(module_partial_fc.state_dict(), path_pfc) + if rank == 0: + path_module = os.path.join(cfg.output, "model.pt") + torch.save(backbone.module.state_dict(), path_module) + + if cfg.dali: + train_loader.reset() + + if rank == 0: + path_module = os.path.join(cfg.output, "model.pt") + torch.save(backbone.module.state_dict(), path_module) + distributed.destroy_process_group() + + +if __name__ == "__main__": + torch.backends.cudnn.benchmark = True + parser = argparse.ArgumentParser(description="Distributed Arcface Training in Pytorch") + parser.add_argument("config", type=str, help="py config file") + parser.add_argument("--local_rank", type=int, default=0, help="local_rank") + main(parser.parse_args()) diff --git a/arcface_torch/utils/__init__.py b/arcface_torch/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/arcface_torch/utils/plot.py b/arcface_torch/utils/plot.py new file mode 100644 index 0000000..7f1d39d --- /dev/null +++ b/arcface_torch/utils/plot.py @@ -0,0 +1,71 @@ +import os +import sys + +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +from menpo.visualize.viewmatplotlib import sample_colours_from_colourmap +from prettytable import PrettyTable +from sklearn.metrics import roc_curve, auc + +with open(sys.argv[1], "r") as f: + files = f.readlines() + +files = [x.strip() for x in files] +image_path = "/train_tmp/IJB_release/IJBC" + + +def read_template_pair_list(path): + pairs = pd.read_csv(path, sep=' ', header=None).values + t1 = pairs[:, 0].astype(np.int) + t2 = pairs[:, 1].astype(np.int) + label = pairs[:, 2].astype(np.int) + return t1, t2, label + + +p1, p2, label = read_template_pair_list( + os.path.join('%s/meta' % image_path, + '%s_template_pair_label.txt' % 'ijbc')) + +methods = [] +scores = [] +for file in files: + methods.append(file) + scores.append(np.load(file)) + +methods = np.array(methods) +scores = dict(zip(methods, scores)) +colours = dict( + 
zip(methods, sample_colours_from_colourmap(methods.shape[0], 'Set2'))) +x_labels = [10 ** -6, 10 ** -5, 10 ** -4, 10 ** -3, 10 ** -2, 10 ** -1] +tpr_fpr_table = PrettyTable(['Methods'] + [str(x) for x in x_labels]) +fig = plt.figure() +for method in methods: + fpr, tpr, _ = roc_curve(label, scores[method]) + roc_auc = auc(fpr, tpr) + fpr = np.flipud(fpr) + tpr = np.flipud(tpr) # select largest tpr at same fpr + plt.plot(fpr, + tpr, + color=colours[method], + lw=1, + label=('[%s (AUC = %0.4f %%)]' % + (method.split('-')[-1], roc_auc * 100))) + tpr_fpr_row = [] + tpr_fpr_row.append(method) + for fpr_iter in np.arange(len(x_labels)): + _, min_index = min( + list(zip(abs(fpr - x_labels[fpr_iter]), range(len(fpr))))) + tpr_fpr_row.append('%.2f' % (tpr[min_index] * 100)) + tpr_fpr_table.add_row(tpr_fpr_row) +plt.xlim([10 ** -6, 0.1]) +plt.ylim([0.3, 1.0]) +plt.grid(linestyle='--', linewidth=1) +plt.xticks(x_labels) +plt.yticks(np.linspace(0.3, 1.0, 8, endpoint=True)) +plt.xscale('log') +plt.xlabel('False Positive Rate') +plt.ylabel('True Positive Rate') +plt.title('ROC on IJB') +plt.legend(loc="lower right") +print(tpr_fpr_table) diff --git a/arcface_torch/utils/utils_callbacks.py b/arcface_torch/utils/utils_callbacks.py new file mode 100644 index 0000000..97fe403 --- /dev/null +++ b/arcface_torch/utils/utils_callbacks.py @@ -0,0 +1,110 @@ +import logging +import os +import time +from typing import List + +import torch + +from eval import verification +from utils.utils_logging import AverageMeter +from torch.utils.tensorboard import SummaryWriter +from torch import distributed + + +class CallBackVerification(object): + + def __init__(self, val_targets, rec_prefix, summary_writer=None, image_size=(112, 112)): + self.rank: int = distributed.get_rank() + self.highest_acc: float = 0.0 + self.highest_acc_list: List[float] = [0.0] * len(val_targets) + self.ver_list: List[object] = [] + self.ver_name_list: List[str] = [] + if self.rank is 0: + 
self.init_dataset(val_targets=val_targets, data_dir=rec_prefix, image_size=image_size) + + self.summary_writer = summary_writer + + def ver_test(self, backbone: torch.nn.Module, global_step: int): + results = [] + for i in range(len(self.ver_list)): + acc1, std1, acc2, std2, xnorm, embeddings_list = verification.test( + self.ver_list[i], backbone, 10, 10) + logging.info('[%s][%d]XNorm: %f' % (self.ver_name_list[i], global_step, xnorm)) + logging.info('[%s][%d]Accuracy-Flip: %1.5f+-%1.5f' % (self.ver_name_list[i], global_step, acc2, std2)) + + self.summary_writer: SummaryWriter + self.summary_writer.add_scalar(tag=self.ver_name_list[i], scalar_value=acc2, global_step=global_step, ) + + if acc2 > self.highest_acc_list[i]: + self.highest_acc_list[i] = acc2 + logging.info( + '[%s][%d]Accuracy-Highest: %1.5f' % (self.ver_name_list[i], global_step, self.highest_acc_list[i])) + results.append(acc2) + + def init_dataset(self, val_targets, data_dir, image_size): + for name in val_targets: + path = os.path.join(data_dir, name + ".bin") + if os.path.exists(path): + data_set = verification.load_bin(path, image_size) + self.ver_list.append(data_set) + self.ver_name_list.append(name) + + def __call__(self, num_update, backbone: torch.nn.Module): + if self.rank is 0 and num_update > 0: + backbone.eval() + self.ver_test(backbone, num_update) + backbone.train() + + +class CallBackLogging(object): + def __init__(self, frequent, total_step, batch_size, writer=None): + self.frequent: int = frequent + self.rank: int = distributed.get_rank() + self.world_size: int = distributed.get_world_size() + self.time_start = time.time() + self.total_step: int = total_step + self.batch_size: int = batch_size + self.writer = writer + + self.init = False + self.tic = 0 + + def __call__(self, + global_step: int, + loss: AverageMeter, + epoch: int, + fp16: bool, + learning_rate: float, + grad_scaler: torch.cuda.amp.GradScaler): + if self.rank == 0 and global_step > 0 and global_step % self.frequent == 
0: + if self.init: + try: + speed: float = self.frequent * self.batch_size / (time.time() - self.tic) + speed_total = speed * self.world_size + except ZeroDivisionError: + speed_total = float('inf') + + time_now = (time.time() - self.time_start) / 3600 + time_total = time_now / ((global_step + 1) / self.total_step) + time_for_end = time_total - time_now + if self.writer is not None: + self.writer.add_scalar('time_for_end', time_for_end, global_step) + self.writer.add_scalar('learning_rate', learning_rate, global_step) + self.writer.add_scalar('loss', loss.avg, global_step) + if fp16: + msg = "Speed %.2f samples/sec Loss %.4f LearningRate %.4f Epoch: %d Global Step: %d " \ + "Fp16 Grad Scale: %2.f Required: %1.f hours" % ( + speed_total, loss.avg, learning_rate, epoch, global_step, + grad_scaler.get_scale(), time_for_end + ) + else: + msg = "Speed %.2f samples/sec Loss %.4f LearningRate %.4f Epoch: %d Global Step: %d " \ + "Required: %1.f hours" % ( + speed_total, loss.avg, learning_rate, epoch, global_step, time_for_end + ) + logging.info(msg) + loss.reset() + self.tic = time.time() + else: + self.init = True + self.tic = time.time() diff --git a/arcface_torch/utils/utils_config.py b/arcface_torch/utils/utils_config.py new file mode 100644 index 0000000..0c02eaf --- /dev/null +++ b/arcface_torch/utils/utils_config.py @@ -0,0 +1,16 @@ +import importlib +import os.path as osp + + +def get_config(config_file): + assert config_file.startswith('configs/'), 'config file setting must start with configs/' + temp_config_name = osp.basename(config_file) + temp_module_name = osp.splitext(temp_config_name)[0] + config = importlib.import_module("configs.base") + cfg = config.config + config = importlib.import_module("configs.%s" % temp_module_name) + job_cfg = config.config + cfg.update(job_cfg) + if cfg.output is None: + cfg.output = osp.join('work_dirs', temp_module_name) + return cfg \ No newline at end of file diff --git a/arcface_torch/utils/utils_logging.py 
class AverageMeter(object):
    """Computes and stores the average and current value."""

    def __init__(self):
        # reset() establishes every attribute; the original pre-assigned
        # them all to None first, which was redundant.
        self.reset()

    def reset(self):
        """Clear all running statistics."""
        self.val = 0    # most recent value
        self.avg = 0    # running mean
        self.sum = 0    # weighted sum of values
        self.count = 0  # total number of observations

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running mean.

        Args:
            val: the new observation (e.g. a per-batch loss).
            n: how many samples ``val`` represents (default 1).
        """
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count


def init_logging(rank, models_root):
    """Configure the root logger on rank 0 with file + stdout handlers.

    Args:
        rank: distributed rank; non-zero ranks are left unconfigured so only
            one process writes logs.
        models_root: directory in which ``training.log`` is created.
    """
    if rank == 0:
        log_root = logging.getLogger()
        # BUG FIX: the original added handlers unconditionally, so calling
        # init_logging twice duplicated every log line. Skip re-initialization
        # if a FileHandler is already attached to the root logger.
        if any(isinstance(h, logging.FileHandler) for h in log_root.handlers):
            return
        log_root.setLevel(logging.INFO)
        formatter = logging.Formatter("Training: %(asctime)s-%(message)s")
        handler_file = logging.FileHandler(os.path.join(models_root, "training.log"))
        handler_stream = logging.StreamHandler(sys.stdout)
        handler_file.setFormatter(formatter)
        handler_stream.setFormatter(formatter)
        log_root.addHandler(handler_file)
        log_root.addHandler(handler_stream)
        log_root.info('rank_id: %d' % rank)
+n000004\0239_01.jpg +n000004\0258_01.jpg +n000005\0138_01.jpg +n000005\0144_01.jpg +n000005\0287_01.jpg +n000006\0007_01.jpg +n000006\0014_01.jpg +n000006\0036_02.jpg +n000006\0091_01.jpg +n000006\0103_01.jpg +n000006\0281_01.jpg +n000006\0300_01.jpg +n000006\0351_01.jpg +n000006\0430_01.jpg +n000006\0519_01.jpg +n000007\0021_01.jpg +n000007\0042_01.jpg +n000007\0045_01.jpg +n000007\0050_02.jpg +n000007\0080_01.jpg +n000007\0086_01.jpg +n000007\0106_02.jpg +n000007\0115_01.jpg +n000007\0116_03.jpg +n000007\0119_01.jpg +n000007\0137_01.jpg +n000007\0140_02.jpg +n000007\0148_02.jpg +n000007\0174_01.jpg +n000007\0181_01.jpg +n000007\0182_02.jpg +n000007\0213_02.jpg +n000007\0226_02.jpg +n000007\0229_01.jpg +n000007\0432_01.jpg +n000008\0072_01.jpg +n000008\0297_01.jpg +n000010\0068_01.jpg +n000010\0069_01.jpg +n000010\0096_01.jpg +n000010\0150_02.jpg +n000010\0155_02.jpg +n000010\0223_01.jpg +n000011\0112_01.jpg +n000011\0142_02.jpg +n000011\0200_01.jpg +n000011\0217_01.jpg +n000011\0229_02.jpg +n000011\0291_02.jpg +n000012\0173_01.jpg +n000012\0180_01.jpg +n000012\0198_01.jpg +n000012\0282_01.jpg +n000012\0294_01.jpg +n000012\0307_01.jpg +n000012\0338_01.jpg +n000013\0029_06.jpg +n000013\0128_01.jpg +n000013\0132_01.jpg +n000013\0148_01.jpg +n000013\0190_02.jpg +n000013\0225_01.jpg +n000013\0277_01.jpg +n000013\0335_01.jpg +n000013\0337_01.jpg +n000013\0341_02.jpg +n000014\0163_01.jpg +n000015\0029_02.jpg +n000015\0059_01.jpg +n000015\0133_01.jpg +n000015\0243_02.jpg +n000015\0392_02.jpg +n000015\0393_01.jpg +n000015\0402_01.jpg +n000016\0189_01.jpg +n000016\0237_01.jpg +n000016\0266_01.jpg +n000016\0385_04.jpg +n000016\0391_01.jpg +n000016\0405_01.jpg +n000016\0477_02.jpg +n000016\0500_01.jpg +n000016\0503_01.jpg +n000016\0503_01.jpg +n000017\0123_02.jpg +n000017\0124_01.jpg +n000017\0163_01.jpg +n000017\0262_01.jpg +n000019\0038_01.jpg +n000019\0055_01.jpg +n000019\0061_01.jpg +n000019\0114_01.jpg +n000019\0130_02.jpg +n000019\0149_02.jpg +n000019\0170_01.jpg 
+n000019\0182_01.jpg +n000019\0219_01.jpg +n000019\0221_02.jpg +n000019\0234_02.jpg +n000019\0249_01.jpg +n000019\0259_01.jpg +n000019\0273_01.jpg +n000019\0306_01.jpg +n000019\0313_01.jpg +n000019\0333_01.jpg +n000019\0350_02.jpg +n000020\0006_01.jpg +n000020\0071_01.jpg +n000020\0074_02.jpg +n000020\0099_02.jpg +n000020\0379_01.jpg +n000020\0400_01.jpg +n000021\0120_02.jpg +n000021\0221_01.jpg +n000022\0051_01.jpg +n000022\0071_01.jpg +n000022\0146_02.jpg +n000022\0146_02.jpg +n000022\0236_01.jpg +n000023\0008_01.jpg +n000023\0078_01.jpg +n000023\0093_01.jpg +n000023\0133_01.jpg +n000023\0162_01.jpg +n000023\0198_01.jpg +n000023\0207_03.jpg +n000023\0269_02.jpg +n000023\0265_01.jpg +n000023\0280_01.jpg +n000023\0366_01.jpg +n000023\0389_01.jpg +n000024\0062_01.jpg +n000024\0073_01.jpg +n000024\0354_04.jpg +n000024\0409_01.jpg +n000025\0100_02.jpg +n000025\0274_02.jpg +n000026\0038_01.jpg +n000026\0041_01.jpg +n000026\0059_01.jpg +n000026\0062_01.jpg +n000026\0065_01.jpg +n000026\0082_02.jpg +n000026\0103_01.jpg +n000026\0137_01.jpg +n000026\0060_01.jpg +n000026\0179_03.jpg +n000026\0196_01.jpg +n000026\0248_01.jpg +n000026\0255_01.jpg +n000026\0273_01.jpg +n000026\0280_01.jpg +n000027\0023_02.jpg +n000027\0023_05.jpg +n000027\0115_01.jpg +n000027\0157_02.jpg +n000027\0171_01.jpg +n000027\0182_02.jpg +n000027\0211_02.jpg +n000027\0255_01.jpg +n000027\0274_04.jpg +n000027\0318_04.jpg +n000027\0326_01.jpg +n000027\0401_01.jpg +n000027\0402_01.jpg +n000027\0438_01.jpg +n000027\0442_01.jpg +n000027\0493_01.jpg +n000028\0040_04.jpg +n000028\0056_01.jpg +n000028\0134_01.jpg +n000028\0136_03.jpg +n000028\0138_01.jpg +n000028\0144_02.jpg +n000028\0156_01.jpg +n000028\0162_01.jpg +n000028\0168_01.jpg +n000028\0205_01.jpg +n000028\0220_01.jpg +n000028\0249_01.jpg +n000028\0300_01.jpg +n000028\0324_02.jpg +n000028\0343_01.jpg +n000028\0352_01.jpg +n000028\0384_01.jpg +n000028\0392_01.jpg +n000028\0408_02.jpg +n000028\0412_02.jpg +n000030\0112_01.jpg +n000030\0119_01.jpg 
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Ad-hoc inspection script: deserialize the ArcFace checkpoint on CPU,
# dump its repr, and pull the wrapped model out of the DataParallel module.
# NOTE(review): torch.load unpickles arbitrary data — only run on a
# checkpoint from a trusted source.
import torch

if __name__ == "__main__":
    arcface1 = torch.load("./arcface_ckpt/arcface_checkpoint.tar", map_location=torch.device("cpu"))
    print(arcface1)
    arcface = arcface1['model'].module
b/train_scripts/trainer_multi_gpu.py @@ -5,7 +5,7 @@ # Created Date: Sunday January 9th 2022 # Author: Chen Xuanhong # Email: chenxuanhongzju@outlook.com -# Last Modified: Tuesday, 15th February 2022 12:00:24 am +# Last Modified: Thursday, 17th March 2022 1:01:52 am # Modified By: Chen Xuanhong # Copyright (c) 2022 Shanghai Jiao Tong University ############################################################# @@ -26,6 +26,8 @@ from torch_utils import training_stats from torch_utils.ops import conv2d_gradfix from torch_utils.ops import grid_sample_gradfix +from arcface_torch.backbones.iresnet import iresnet100 + from utilities.plot import plot_batch from losses.cos import cosin_metric from train_scripts.trainer_multigpu_base import TrainerBase @@ -95,8 +97,12 @@ def init_framework(config, reporter, device, rank): reporter.writeInfo("Discriminator structure:") reporter.writeModel(dis.__str__()) - arcface1 = torch.load(config["arcface_ckpt"], map_location=torch.device("cpu")) - arcface = arcface1['model'].module + # arcface1 = torch.load(config["arcface_ckpt"], map_location=torch.device("cpu")) + # arcface = arcface1['model'].module + + arcface = iresnet100(pretrained=False, fp16=False) + arcface.load_state_dict(torch.load(config["arcface_ckpt"], map_location='cpu')) + arcface.eval() # train in GPU diff --git a/vggface2hq_failed.txt b/vggface2hq_failed.txt index be8f944..caa9ec3 100644 --- a/vggface2hq_failed.txt +++ b/vggface2hq_failed.txt @@ -5838,6 +5838,6734 @@ n000999/0035_01.jpg n000999/0104_01.jpg n000999/0264_01.jpg n001000/0264_01.jpg +n001001/0015_03.jpg +n001001/0043_02.jpg +n001001/0053_01.jpg +n001001/0094_01.jpg +n001001/0171_02.jpg +n001001/0233_02.jpg +n001001/0278_02.jpg +n001001/0281_03.jpg +n001001/0335_02.jpg +n001001/0356_05.jpg +n001001/0421_01.jpg +n001001/0455_02.jpg +n001002/0012_02.jpg +n001002/0091_02.jpg +n001002/0094_01.jpg +n001002/0106_02.jpg +n001002/0142_02.jpg +n001002/0263_01.jpg +n001002/0280_01.jpg +n001002/0357_01.jpg 
+n001002/0398_04.jpg +n001003/0007_02.jpg +n001003/0029_01.jpg +n001003/0124_01.jpg +n001003/0139_03.jpg +n001003/0145_03.jpg +n001003/0174_01.jpg +n001003/0230_02.jpg +n001003/0327_02.jpg +n001003/0329_01.jpg +n001003/0338_01.jpg +n001004/0168_01.jpg +n001005/0003_01.jpg +n001005/0018_03.jpg +n001005/0060_01.jpg +n001005/0122_02.jpg +n001005/0131_03.jpg +n001005/0144_01.jpg +n001005/0199_01.jpg +n001005/0282_01.jpg +n001005/0311_01.jpg +n001005/0351_02.jpg +n001006/0162_02.jpg +n001006/0168_01.jpg +n001006/0174_02.jpg +n001006/0175_01.jpg +n001006/0421_01.jpg +n001006/0425_01.jpg +n001006/0472_01.jpg +n001006/0574_01.jpg +n001007/0038_01.jpg +n001007/0063_01.jpg +n001007/0158_02.jpg +n001007/0189_01.jpg +n001007/0248_01.jpg +n001007/0330_02.jpg +n001007/0332_01.jpg +n001007/0343_01.jpg +n001007/0374_02.jpg +n001008/0030_02.jpg +n001008/0031_02.jpg +n001008/0038_02.jpg +n001008/0055_01.jpg +n001008/0062_02.jpg +n001008/0123_01.jpg +n001008/0120_01.jpg +n001008/0138_01.jpg +n001008/0435_04.jpg +n001009/0036_01.jpg +n001009/0064_01.jpg +n001009/0159_01.jpg +n001009/0261_02.jpg +n001009/0395_01.jpg +n001010/0076_03.jpg +n001010/0093_01.jpg +n001010/0151_01.jpg +n001010/0152_02.jpg +n001010/0509_01.jpg +n001010/0509_03.jpg +n001010/0511_01.jpg +n001011/0037_01.jpg +n001011/0144_01.jpg +n001011/0199_01.jpg +n001011/0273_01.jpg +n001011/0275_01.jpg +n001012/0096_01.jpg +n001012/0383_02.jpg +n001014/0038_02.jpg +n001015/0022_02.jpg +n001015/0037_01.jpg +n001015/0047_02.jpg +n001015/0063_03.jpg +n001015/0097_05.jpg +n001015/0213_03.jpg +n001015/0225_02.jpg +n001015/0278_01.jpg +n001015/0304_01.jpg +n001015/0305_01.jpg +n001015/0310_02.jpg +n001015/0314_01.jpg +n001015/0322_02.jpg +n001015/0359_01.jpg +n001015/0356_01.jpg +n001015/0394_01.jpg +n001015/0409_01.jpg +n001015/0448_01.jpg +n001015/0477_01.jpg +n001015/0515_01.jpg +n001015/0556_01.jpg +n001016/0151_01.jpg +n001016/0153_02.jpg +n001016/0163_02.jpg +n001016/0172_02.jpg +n001016/0323_01.jpg +n001016/0380_01.jpg 
+n001017/0013_01.jpg +n001017/0133_01.jpg +n001017/0253_01.jpg +n001017/0297_01.jpg +n001018/0076_02.jpg +n001018/0188_01.jpg +n001018/0208_01.jpg +n001018/0310_01.jpg +n001018/0386_01.jpg +n001018/0441_01.jpg +n001018/0470_01.jpg +n001019/0083_02.jpg +n001019/0093_01.jpg +n001019/0141_03.jpg +n001019/0273_01.jpg +n001019/0291_01.jpg +n001019/0301_01.jpg +n001019/0340_02.jpg +n001019/0347_01.jpg +n001019/0444_02.jpg +n001019/0532_01.jpg +n001023/0010_01.jpg +n001023/0039_02.jpg +n001023/0041_01.jpg +n001023/0085_01.jpg +n001023/0263_01.jpg +n001024/0064_01.jpg +n001024/0122_01.jpg +n001024/0162_01.jpg +n001024/0167_01.jpg +n001024/0199_01.jpg +n001024/0260_01.jpg +n001024/0261_01.jpg +n001024/0262_01.jpg +n001024/0280_01.jpg +n001024/0364_01.jpg +n001024/0476_01.jpg +n001025/0184_02.jpg +n001025/0195_01.jpg +n001025/0203_01.jpg +n001025/0226_01.jpg +n001025/0281_02.jpg +n001025/0404_02.jpg +n001025/0441_02.jpg +n001025/0446_01.jpg +n001026/0030_01.jpg +n001026/0100_01.jpg +n001026/0266_01.jpg +n001026/0349_01.jpg +n001027/0046_01.jpg +n001027/0135_01.jpg +n001027/0146_02.jpg +n001027/0153_01.jpg +n001027/0238_01.jpg +n001027/0265_01.jpg +n001027/0302_01.jpg +n001027/0304_01.jpg +n001027/0339_01.jpg +n001027/0363_01.jpg +n001028/0036_01.jpg +n001028/0072_01.jpg +n001028/0177_01.jpg +n001028/0178_02.jpg +n001028/0219_01.jpg +n001028/0227_01.jpg +n001028/0237_02.jpg +n001028/0287_02.jpg +n001028/0457_05.jpg +n001028/0496_01.jpg +n001028/0553_01.jpg +n001029/0034_03.jpg +n001029/0181_01.jpg +n001030/0014_02.jpg +n001030/0123_02.jpg +n001030/0157_01.jpg +n001030/0162_02.jpg +n001030/0208_02.jpg +n001030/0312_01.jpg +n001031/0018_01.jpg +n001031/0046_01.jpg +n001031/0144_02.jpg +n001031/0183_01.jpg +n001031/0200_01.jpg +n001031/0221_03.jpg +n001031/0278_02.jpg +n001031/0288_02.jpg +n001031/0399_01.jpg +n001032/0086_01.jpg +n001032/0112_02.jpg +n001032/0206_01.jpg +n001032/0326_01.jpg +n001033/0016_02.jpg +n001033/0059_03.jpg +n001033/0077_04.jpg +n001033/0110_01.jpg 
+n001033/0128_02.jpg +n001033/0138_01.jpg +n001033/0173_01.jpg +n001033/0194_01.jpg +n001033/0305_01.jpg +n001033/0328_02.jpg +n001033/0329_01.jpg +n001033/0336_01.jpg +n001033/0365_01.jpg +n001033/0372_01.jpg +n001033/0393_02.jpg +n001033/0431_01.jpg +n001033/0435_02.jpg +n001034/0075_02.jpg +n001034/0085_01.jpg +n001034/0090_01.jpg +n001034/0090_02.jpg +n001034/0188_01.jpg +n001034/0214_01.jpg +n001034/0220_01.jpg +n001035/0141_03.jpg +n001035/0153_01.jpg +n001036/0007_02.jpg +n001036/0034_02.jpg +n001036/0032_02.jpg +n001036/0117_02.jpg +n001036/0125_03.jpg +n001036/0132_01.jpg +n001036/0148_01.jpg +n001036/0206_01.jpg +n001036/0266_01.jpg +n001036/0269_01.jpg +n001036/0338_04.jpg +n001036/0359_02.jpg +n001036/0381_01.jpg +n001040/0035_02.jpg +n001040/0075_04.jpg +n001040/0188_01.jpg +n001040/0235_01.jpg +n001040/0329_01.jpg +n001040/0373_02.jpg +n001040/0378_01.jpg +n001040/0381_01.jpg +n001040/0391_02.jpg +n001040/0394_01.jpg +n001041/0073_02.jpg +n001041/0310_02.jpg +n001042/0060_01.jpg +n001042/0122_01.jpg +n001042/0152_01.jpg +n001042/0374_01.jpg +n001042/0379_01.jpg +n001042/0380_01.jpg +n001042/0391_02.jpg +n001042/0397_01.jpg +n001042/0399_01.jpg +n001042/0400_01.jpg +n001042/0403_01.jpg +n001042/0404_01.jpg +n001042/0496_01.jpg +n001044/0020_04.jpg +n001044/0051_05.jpg +n001044/0085_01.jpg +n001044/0131_01.jpg +n001044/0259_02.jpg +n001044/0326_01.jpg +n001044/0326_02.jpg +n001044/0373_02.jpg +n001044/0433_01.jpg +n001044/0445_01.jpg +n001045/0085_02.jpg +n001045/0136_01.jpg +n001045/0230_01.jpg +n001045/0239_01.jpg +n001045/0239_04.jpg +n001046/0099_02.jpg +n001046/0108_01.jpg +n001047/0119_02.jpg +n001047/0122_01.jpg +n001047/0136_01.jpg +n001047/0253_01.jpg +n001047/0254_02.jpg +n001047/0277_01.jpg +n001047/0305_01.jpg +n001047/0392_01.jpg +n001048/0156_02.jpg +n001048/0228_02.jpg +n001048/0230_01.jpg +n001048/0366_02.jpg +n001049/0009_03.jpg +n001049/0041_01.jpg +n001049/0077_01.jpg +n001049/0118_02.jpg +n001049/0149_01.jpg +n001049/0186_02.jpg 
+n001050/0016_01.jpg +n001050/0061_01.jpg +n001050/0059_01.jpg +n001050/0076_02.jpg +n001050/0077_01.jpg +n001050/0087_01.jpg +n001050/0099_01.jpg +n001050/0108_01.jpg +n001050/0115_02.jpg +n001050/0117_01.jpg +n001050/0118_01.jpg +n001050/0125_01.jpg +n001050/0134_01.jpg +n001050/0164_01.jpg +n001050/0201_01.jpg +n001050/0202_03.jpg +n001050/0207_01.jpg +n001050/0225_02.jpg +n001050/0228_01.jpg +n001050/0238_01.jpg +n001050/0247_01.jpg +n001050/0252_01.jpg +n001050/0258_02.jpg +n001050/0354_01.jpg +n001050/0372_01.jpg +n001050/0373_01.jpg +n001050/0384_01.jpg +n001050/0387_01.jpg +n001050/0395_01.jpg +n001051/0043_02.jpg +n001051/0097_01.jpg +n001051/0239_01.jpg +n001051/0271_01.jpg +n001052/0018_03.jpg +n001052/0150_02.jpg +n001052/0179_02.jpg +n001052/0208_02.jpg +n001052/0228_01.jpg +n001052/0263_01.jpg +n001052/0354_02.jpg +n001052/0354_01.jpg +n001052/0376_01.jpg +n001052/0387_01.jpg +n001052/0407_01.jpg +n001052/0415_02.jpg +n001052/0418_01.jpg +n001052/0511_01.jpg +n001052/0524_01.jpg +n001053/0125_01.jpg +n001053/0121_01.jpg +n001053/0190_01.jpg +n001053/0255_01.jpg +n001053/0511_03.jpg +n001054/0080_03.jpg +n001054/0140_01.jpg +n001054/0159_01.jpg +n001054/0579_01.jpg +n001055/0025_01.jpg +n001055/0061_01.jpg +n001055/0072_01.jpg +n001055/0140_01.jpg +n001055/0142_01.jpg +n001055/0627_01.jpg +n001056/0357_01.jpg +n001056/0385_01.jpg +n001056/0393_02.jpg +n001056/0402_01.jpg +n001057/0004_02.jpg +n001057/0088_02.jpg +n001057/0091_02.jpg +n001057/0108_01.jpg +n001057/0115_01.jpg +n001057/0152_01.jpg +n001057/0228_03.jpg +n001057/0242_02.jpg +n001057/0260_01.jpg +n001057/0282_02.jpg +n001057/0289_01.jpg +n001057/0291_02.jpg +n001057/0329_02.jpg +n001057/0336_01.jpg +n001057/0336_02.jpg +n001057/0346_01.jpg +n001057/0359_01.jpg +n001057/0375_01.jpg +n001057/0414_02.jpg +n001057/0415_02.jpg +n001057/0416_04.jpg +n001057/0438_01.jpg +n001057/0493_01.jpg +n001057/0501_02.jpg +n001043/0017_01.jpg +n001043/0080_01.jpg +n001043/0083_01.jpg +n001043/0087_01.jpg 
+n001038/0002_01.jpg +n001038/0019_03.jpg +n001038/0035_01.jpg +n001038/0050_01.jpg +n001038/0060_02.jpg +n001038/0063_01.jpg +n001038/0077_01.jpg +n001038/0090_02.jpg +n001038/0120_04.jpg +n001038/0124_01.jpg +n001038/0128_01.jpg +n001038/0133_01.jpg +n001038/0140_01.jpg +n001038/0149_01.jpg +n001038/0178_01.jpg +n001038/0196_01.jpg +n001038/0198_01.jpg +n001038/0206_02.jpg +n001038/0210_01.jpg +n001038/0233_01.jpg +n001038/0235_01.jpg +n001038/0235_04.jpg +n001038/0282_01.jpg +n001038/0286_01.jpg +n001038/0335_02.jpg +n001038/0335_03.jpg +n001038/0395_01.jpg +n001038/0426_01.jpg +n001038/0458_01.jpg +n001038/0484_02.jpg +n001038/0512_02.jpg +n001037/0087_01.jpg +n001037/0325_01.jpg +n001037/0339_01.jpg +n001037/0366_01.jpg +n001058/0081_01.jpg +n001058/0256_02.jpg +n001058/0282_01.jpg +n001060/0118_02.jpg +n001060/0245_01.jpg +n001060/0249_02.jpg +n001060/0259_02.jpg +n001060/0334_02.jpg +n001060/0355_02.jpg +n001061/0129_01.jpg +n001061/0330_02.jpg +n001061/0342_01.jpg +n001061/0350_01.jpg +n001062/0222_01.jpg +n001063/0040_01.jpg +n001063/0049_01.jpg +n001063/0152_01.jpg +n001063/0155_01.jpg +n001063/0158_01.jpg +n001063/0227_03.jpg +n001063/0424_02.jpg +n001063/0429_01.jpg +n001063/0432_01.jpg +n001063/0442_01.jpg +n001064/0234_01.jpg +n001064/0234_02.jpg +n001064/0276_01.jpg +n001064/0371_01.jpg +n001064/0512_01.jpg +n001065/0065_01.jpg +n001065/0066_01.jpg +n001065/0068_02.jpg +n001065/0070_01.jpg +n001065/0107_01.jpg +n001065/0108_01.jpg +n001065/0125_01.jpg +n001065/0126_02.jpg +n001065/0153_02.jpg +n001065/0215_01.jpg +n001065/0227_01.jpg +n001065/0296_01.jpg +n001065/0326_01.jpg +n001065/0366_01.jpg +n001065/0367_01.jpg +n001065/0379_01.jpg +n001066/0055_01.jpg +n001066/0087_01.jpg +n001066/0122_02.jpg +n001066/0123_01.jpg +n001066/0154_01.jpg +n001066/0174_01.jpg +n001066/0214_01.jpg +n001066/0250_01.jpg +n001066/0300_04.jpg +n001066/0309_01.jpg +n001066/0360_02.jpg +n001066/0388_01.jpg +n001066/0401_01.jpg +n001066/0419_01.jpg +n001066/0504_01.jpg 
+n001066/0513_01.jpg +n001066/0517_02.jpg +n001067/0093_01.jpg +n001067/0127_02.jpg +n001068/0043_01.jpg +n001068/0062_01.jpg +n001068/0087_01.jpg +n001068/0117_01.jpg +n001068/0174_01.jpg +n001068/0182_03.jpg +n001068/0202_01.jpg +n001068/0351_01.jpg +n001068/0399_05.jpg +n001068/0514_02.jpg +n001069/0202_02.jpg +n001069/0279_01.jpg +n001071/0156_01.jpg +n001071/0317_02.jpg +n001071/0421_01.jpg +n001071/0426_02.jpg +n001072/0044_01.jpg +n001072/0057_02.jpg +n001072/0119_01.jpg +n001072/0138_01.jpg +n001072/0140_01.jpg +n001072/0148_01.jpg +n001072/0184_01.jpg +n001072/0221_01.jpg +n001072/0239_02.jpg +n001072/0250_01.jpg +n001072/0270_01.jpg +n001072/0276_01.jpg +n001072/0293_02.jpg +n001072/0305_01.jpg +n001072/0310_01.jpg +n001072/0348_01.jpg +n001072/0375_01.jpg +n001073/0174_01.jpg +n001073/0175_01.jpg +n001073/0210_01.jpg +n001073/0238_02.jpg +n001073/0261_02.jpg +n001073/0310_01.jpg +n001074/0038_01.jpg +n001074/0046_01.jpg +n001074/0116_01.jpg +n001074/0130_01.jpg +n001074/0176_05.jpg +n001074/0189_02.jpg +n001074/0201_01.jpg +n001074/0204_01.jpg +n001074/0208_01.jpg +n001075/0163_02.jpg +n001075/0201_01.jpg +n001075/0221_01.jpg +n001075/0312_01.jpg +n001076/0180_02.jpg +n001076/0222_01.jpg +n001076/0234_02.jpg +n001076/0242_01.jpg +n001076/0265_01.jpg +n001076/0285_01.jpg +n001076/0292_01.jpg +n001077/0094_01.jpg +n001077/0244_01.jpg +n001077/0252_02.jpg +n001077/0254_02.jpg +n001077/0266_01.jpg +n001077/0267_02.jpg +n001077/0346_01.jpg +n001077/0389_02.jpg +n001077/0400_01.jpg +n001078/0030_01.jpg +n001078/0089_01.jpg +n001078/0127_01.jpg +n001078/0222_02.jpg +n001078/0231_01.jpg +n001078/0231_02.jpg +n001078/0349_01.jpg +n001078/0384_02.jpg +n001079/0005_01.jpg +n001079/0072_01.jpg +n001080/0001_01.jpg +n001080/0252_03.jpg +n001080/0268_01.jpg +n001080/0325_01.jpg +n001081/0197_02.jpg +n001081/0204_02.jpg +n001081/0214_01.jpg +n001081/0246_01.jpg +n001082/0379_01.jpg +n001082/0335_01.jpg +n001082/0420_02.jpg +n001083/0092_01.jpg +n001083/0117_01.jpg 
+n001083/0119_01.jpg +n001083/0141_01.jpg +n001083/0159_01.jpg +n001083/0177_02.jpg +n001083/0202_01.jpg +n001083/0223_01.jpg +n001083/0223_03.jpg +n001083/0378_02.jpg +n001084/0009_01.jpg +n001084/0031_04.jpg +n001084/0085_02.jpg +n001084/0081_01.jpg +n001084/0088_01.jpg +n001084/0090_02.jpg +n001084/0099_02.jpg +n001084/0217_02.jpg +n001084/0255_02.jpg +n001084/0267_01.jpg +n001084/0279_02.jpg +n001084/0295_01.jpg +n001084/0511_02.jpg +n001084/0544_01.jpg +n001084/0551_01.jpg +n001084/0566_02.jpg +n001085/0017_01.jpg +n001085/0056_02.jpg +n001085/0076_01.jpg +n001085/0191_01.jpg +n001085/0193_01.jpg +n001085/0206_02.jpg +n001085/0240_01.jpg +n001085/0259_01.jpg +n001086/0061_02.jpg +n001086/0140_01.jpg +n001086/0140_02.jpg +n001086/0168_02.jpg +n001086/0192_01.jpg +n001086/0230_02.jpg +n001086/0260_01.jpg +n001086/0279_01.jpg +n001087/0303_01.jpg +n001088/0043_02.jpg +n001088/0139_01.jpg +n001088/0169_01.jpg +n001088/0169_02.jpg +n001088/0253_02.jpg +n001088/0255_01.jpg +n001088/0329_01.jpg +n001088/0346_01.jpg +n001088/0347_02.jpg +n001088/0360_01.jpg +n001089/0002_02.jpg +n001089/0104_01.jpg +n001089/0319_01.jpg +n001089/0322_01.jpg +n001090/0036_01.jpg +n001090/0108_01.jpg +n001090/0299_01.jpg +n001090/0319_01.jpg +n001090/0388_01.jpg +n001090/0391_01.jpg +n001090/0396_01.jpg +n001090/0399_01.jpg +n001090/0488_01.jpg +n001091/0088_02.jpg +n001091/0129_02.jpg +n001091/0177_03.jpg +n001091/0177_04.jpg +n001091/0266_02.jpg +n001091/0297_01.jpg +n001091/0316_03.jpg +n001091/0514_02.jpg +n001091/0526_01.jpg +n001091/0538_02.jpg +n001091/0552_01.jpg +n001091/0552_02.jpg +n001091/0554_02.jpg +n001092/0078_01.jpg +n001092/0079_01.jpg +n001092/0094_01.jpg +n001092/0170_01.jpg +n001092/0179_01.jpg +n001092/0192_01.jpg +n001092/0226_01.jpg +n001092/0228_01.jpg +n001092/0237_03.jpg +n001092/0275_01.jpg +n001092/0294_02.jpg +n001092/0301_01.jpg +n001093/0029_01.jpg +n001093/0168_02.jpg +n001093/0202_01.jpg +n001093/0250_01.jpg +n001093/0271_01.jpg +n001093/0287_01.jpg 
+n001093/0313_01.jpg +n001093/0359_01.jpg +n001093/0391_02.jpg +n001093/0402_01.jpg +n001093/0425_01.jpg +n001094/0187_01.jpg +n001094/0197_01.jpg +n001094/0206_01.jpg +n001094/0218_01.jpg +n001094/0254_01.jpg +n001094/0263_01.jpg +n001094/0311_03.jpg +n001094/0339_01.jpg +n001094/0340_01.jpg +n001094/0417_01.jpg +n001094/0447_02.jpg +n001094/0453_02.jpg +n001094/0479_01.jpg +n001094/0481_01.jpg +n001094/0494_01.jpg +n001095/0011_01.jpg +n001095/0127_01.jpg +n001095/0138_01.jpg +n001095/0369_01.jpg +n001095/0370_01.jpg +n001095/0379_02.jpg +n001095/0449_01.jpg +n001096/0082_02.jpg +n001096/0110_03.jpg +n001096/0150_01.jpg +n001096/0226_02.jpg +n001096/0274_02.jpg +n001096/0275_03.jpg +n001096/0278_01.jpg +n001096/0284_02.jpg +n001096/0298_01.jpg +n001096/0303_02.jpg +n001096/0318_02.jpg +n001096/0320_01.jpg +n001096/0332_03.jpg +n001096/0336_01.jpg +n001096/0340_02.jpg +n001096/0410_02.jpg +n001097/0073_02.jpg +n001097/0091_01.jpg +n001097/0091_04.jpg +n001097/0133_02.jpg +n001097/0136_03.jpg +n001097/0155_04.jpg +n001097/0197_01.jpg +n001097/0198_01.jpg +n001097/0241_02.jpg +n001097/0275_02.jpg +n001098/0107_01.jpg +n001098/0148_01.jpg +n001098/0170_02.jpg +n001098/0171_01.jpg +n001098/0212_02.jpg +n001098/0219_01.jpg +n001098/0244_01.jpg +n001098/0490_01.jpg +n001098/0502_01.jpg +n001099/0074_02.jpg +n001099/0078_01.jpg +n001099/0140_01.jpg +n001099/0206_01.jpg +n001099/0212_01.jpg +n001099/0216_01.jpg +n001099/0221_02.jpg +n001099/0244_03.jpg +n001100/0045_01.jpg +n001100/0057_01.jpg +n001100/0062_02.jpg +n001100/0063_01.jpg +n001100/0089_01.jpg +n001100/0111_02.jpg +n001100/0127_01.jpg +n001100/0199_01.jpg +n001100/0205_02.jpg +n001100/0206_02.jpg +n001100/0210_04.jpg +n001100/0248_01.jpg +n001100/0250_02.jpg +n001100/0268_01.jpg +n001100/0269_01.jpg +n001100/0270_01.jpg +n001100/0305_02.jpg +n001100/0319_04.jpg +n001100/0371_02.jpg +n001100/0388_01.jpg +n001100/0390_02.jpg +n001100/0395_01.jpg +n001100/0396_01.jpg +n001100/0409_01.jpg +n001100/0411_02.jpg 
+n001100/0423_02.jpg +n001101/0027_01.jpg +n001101/0034_03.jpg +n001101/0146_01.jpg +n001101/0172_01.jpg +n001101/0221_01.jpg +n001101/0235_01.jpg +n001101/0258_01.jpg +n001101/0271_01.jpg +n001101/0275_01.jpg +n001101/0284_01.jpg +n001102/0048_01.jpg +n001102/0050_02.jpg +n001102/0092_01.jpg +n001102/0201_01.jpg +n001102/0250_01.jpg +n001103/0020_01.jpg +n001103/0111_01.jpg +n001103/0124_02.jpg +n001103/0130_01.jpg +n001103/0186_01.jpg +n001103/0188_01.jpg +n001103/0190_02.jpg +n001103/0201_01.jpg +n001103/0217_02.jpg +n001103/0225_01.jpg +n001103/0242_01.jpg +n001104/0105_02.jpg +n001104/0106_02.jpg +n001104/0181_02.jpg +n001104/0255_01.jpg +n001104/0255_02.jpg +n001104/0272_01.jpg +n001104/0316_02.jpg +n001104/0353_01.jpg +n001105/0052_02.jpg +n001105/0092_01.jpg +n001105/0213_01.jpg +n001105/0214_01.jpg +n001105/0266_01.jpg +n001105/0303_02.jpg +n001105/0316_01.jpg +n001105/0323_01.jpg +n001105/0351_01.jpg +n001105/0377_01.jpg +n001105/0425_01.jpg +n001105/0434_02.jpg +n001105/0432_01.jpg +n001106/0041_01.jpg +n001106/0079_01.jpg +n001106/0101_01.jpg +n001106/0171_01.jpg +n001106/0189_01.jpg +n001106/0244_01.jpg +n001106/0301_01.jpg +n001106/0362_01.jpg +n001106/0411_01.jpg +n001106/0428_02.jpg +n001106/0446_02.jpg +n001108/0032_01.jpg +n001108/0057_01.jpg +n001108/0073_01.jpg +n001108/0193_01.jpg +n001108/0213_02.jpg +n001108/0288_01.jpg +n001108/0357_01.jpg +n001108/0444_01.jpg +n001109/0195_01.jpg +n001109/0197_01.jpg +n001109/0198_01.jpg +n001109/0204_01.jpg +n001109/0205_01.jpg +n001109/0209_01.jpg +n001109/0221_01.jpg +n001109/0324_01.jpg +n001109/0396_01.jpg +n001109/0403_01.jpg +n001110/0220_02.jpg +n001111/0060_01.jpg +n001111/0190_01.jpg +n001111/0193_01.jpg +n001111/0223_01.jpg +n001111/0242_01.jpg +n001111/0280_01.jpg +n001111/0276_02.jpg +n001111/0377_01.jpg +n001111/0393_02.jpg +n001111/0426_02.jpg +n001112/0164_01.jpg +n001112/0186_02.jpg +n001112/0234_01.jpg +n001113/0190_01.jpg +n001113/0289_01.jpg +n001113/0290_01.jpg +n001113/0293_01.jpg 
+n001113/0302_01.jpg +n001113/0423_01.jpg +n001113/0443_02.jpg +n001114/0042_01.jpg +n001114/0159_03.jpg +n001114/0234_01.jpg +n001114/0547_02.jpg +n001114/0558_02.jpg +n001115/0008_01.jpg +n001115/0031_01.jpg +n001115/0089_01.jpg +n001115/0162_01.jpg +n001115/0166_02.jpg +n001115/0168_01.jpg +n001115/0227_02.jpg +n001115/0254_01.jpg +n001115/0273_01.jpg +n001115/0279_06.jpg +n001115/0374_01.jpg +n001115/0397_01.jpg +n001116/0021_02.jpg +n001116/0038_01.jpg +n001116/0102_02.jpg +n001116/0122_02.jpg +n001116/0300_02.jpg +n001116/0311_02.jpg +n001117/0132_02.jpg +n001117/0142_01.jpg +n001117/0186_01.jpg +n001117/0218_01.jpg +n001117/0295_01.jpg +n001117/0296_02.jpg +n001117/0300_02.jpg +n001117/0312_02.jpg +n001117/0336_01.jpg +n001117/0439_01.jpg +n001118/0004_01.jpg +n001118/0061_01.jpg +n001119/0029_01.jpg +n001119/0093_01.jpg +n001119/0110_01.jpg +n001119/0166_01.jpg +n001119/0185_01.jpg +n001119/0196_01.jpg +n001119/0214_01.jpg +n001119/0220_01.jpg +n001119/0223_02.jpg +n001119/0239_01.jpg +n001119/0244_01.jpg +n001119/0264_01.jpg +n001119/0291_01.jpg +n001119/0378_01.jpg +n001119/0384_01.jpg +n001120/0006_02.jpg +n001120/0060_03.jpg +n001120/0216_04.jpg +n001120/0259_01.jpg +n001120/0337_02.jpg +n001120/0364_02.jpg +n001121/0152_03.jpg +n001121/0175_01.jpg +n001121/0276_01.jpg +n001121/0392_01.jpg +n001122/0069_02.jpg +n001122/0226_01.jpg +n001122/0244_01.jpg +n001122/0248_01.jpg +n001122/0381_02.jpg +n001122/0494_01.jpg +n001123/0068_01.jpg +n001123/0106_02.jpg +n001123/0204_01.jpg +n001123/0240_01.jpg +n001123/0269_01.jpg +n001123/0354_02.jpg +n001123/0382_01.jpg +n001124/0075_01.jpg +n001124/0215_01.jpg +n001124/0294_01.jpg +n001124/0404_01.jpg +n001124/0410_02.jpg +n001124/0443_01.jpg +n001126/0188_01.jpg +n001126/0230_01.jpg +n001128/0039_02.jpg +n001128/0063_01.jpg +n001128/0073_02.jpg +n001128/0110_02.jpg +n001128/0142_03.jpg +n001128/0167_01.jpg +n001128/0185_01.jpg +n001128/0317_01.jpg +n001129/0111_01.jpg +n001129/0187_01.jpg +n001129/0220_01.jpg 
+n001129/0230_01.jpg +n001129/0259_01.jpg +n001129/0309_01.jpg +n001129/0325_03.jpg +n001129/0367_02.jpg +n001129/0414_01.jpg +n001129/0430_01.jpg +n001129/0426_02.jpg +n001129/0435_03.jpg +n001130/0004_01.jpg +n001130/0009_01.jpg +n001130/0040_01.jpg +n001130/0112_01.jpg +n001130/0117_01.jpg +n001130/0185_02.jpg +n001130/0205_01.jpg +n001130/0211_01.jpg +n001130/0362_01.jpg +n001130/0411_01.jpg +n001130/0391_01.jpg +n001130/0441_01.jpg +n001131/0010_03.jpg +n001131/0051_01.jpg +n001131/0058_03.jpg +n001131/0087_01.jpg +n001131/0106_02.jpg +n001131/0116_01.jpg +n001131/0133_01.jpg +n001131/0151_01.jpg +n001131/0191_01.jpg +n001131/0280_01.jpg +n001131/0332_01.jpg +n001131/0333_01.jpg +n001131/0429_01.jpg +n001131/0441_01.jpg +n001131/0495_01.jpg +n001132/0020_02.jpg +n001132/0125_01.jpg +n001132/0126_02.jpg +n001132/0171_02.jpg +n001132/0202_01.jpg +n001132/0207_05.jpg +n001132/0240_02.jpg +n001132/0244_01.jpg +n001132/0353_01.jpg +n001132/0378_01.jpg +n001132/0410_03.jpg +n001132/0438_01.jpg +n001132/0491_02.jpg +n001132/0500_01.jpg +n001132/0508_02.jpg +n001132/0527_01.jpg +n001132/0610_02.jpg +n001133/0387_01.jpg +n001134/0214_01.jpg +n001134/0474_02.jpg +n001134/0509_01.jpg +n001134/0525_01.jpg +n001135/0017_01.jpg +n001135/0033_02.jpg +n001135/0056_01.jpg +n001135/0071_01.jpg +n001135/0098_01.jpg +n001135/0116_01.jpg +n001135/0146_02.jpg +n001135/0163_01.jpg +n001135/0211_03.jpg +n001135/0252_03.jpg +n001135/0255_01.jpg +n001135/0265_01.jpg +n001135/0274_02.jpg +n001135/0311_03.jpg +n001135/0352_03.jpg +n001136/0279_02.jpg +n001136/0316_02.jpg +n001137/0059_02.jpg +n001137/0073_02.jpg +n001138/0220_01.jpg +n001138/0295_01.jpg +n001138/0312_01.jpg +n001138/0345_02.jpg +n001138/0578_01.jpg +n001139/0347_03.jpg +n001139/0354_02.jpg +n001139/0356_01.jpg +n001140/0126_03.jpg +n001140/0316_01.jpg +n001142/0005_02.jpg +n001142/0014_01.jpg +n001142/0057_01.jpg +n001142/0110_02.jpg +n001142/0191_01.jpg +n001142/0241_02.jpg +n001142/0243_01.jpg +n001142/0347_01.jpg 
+n001142/0457_02.jpg +n001142/0459_01.jpg +n001142/0484_01.jpg +n001142/0493_01.jpg +n001143/0060_01.jpg +n001143/0070_02.jpg +n001143/0075_03.jpg +n001143/0097_01.jpg +n001143/0110_01.jpg +n001143/0144_01.jpg +n001143/0177_02.jpg +n001143/0192_03.jpg +n001143/0192_05.jpg +n001143/0197_02.jpg +n001143/0198_01.jpg +n001143/0198_03.jpg +n001143/0213_01.jpg +n001143/0215_02.jpg +n001143/0256_01.jpg +n001143/0301_01.jpg +n001143/0318_02.jpg +n001143/0331_02.jpg +n001143/0488_01.jpg +n001144/0056_01.jpg +n001144/0272_01.jpg +n001144/0342_01.jpg +n001145/0006_02.jpg +n001145/0033_01.jpg +n001145/0038_03.jpg +n001145/0047_01.jpg +n001145/0147_01.jpg +n001145/0323_01.jpg +n001145/0358_03.jpg +n001145/0399_01.jpg +n001145/0422_01.jpg +n001145/0476_01.jpg +n001145/0556_02.jpg +n001145/0582_01.jpg +n001147/0099_02.jpg +n001147/0165_01.jpg +n001147/0350_01.jpg +n001147/0365_05.jpg +n001147/0367_01.jpg +n001147/0374_03.jpg +n001147/0432_01.jpg +n001148/0005_01.jpg +n001148/0067_02.jpg +n001148/0077_01.jpg +n001148/0101_01.jpg +n001148/0112_01.jpg +n001148/0156_01.jpg +n001148/0220_01.jpg +n001148/0232_03.jpg +n001148/0265_02.jpg +n001148/0275_02.jpg +n001148/0303_01.jpg +n001148/0364_01.jpg +n001148/0377_01.jpg +n001148/0419_01.jpg +n001148/0421_01.jpg +n001148/0422_01.jpg +n001148/0423_02.jpg +n001148/0434_01.jpg +n001148/0477_02.jpg +n001148/0487_02.jpg +n001148/0514_01.jpg +n001148/0533_01.jpg +n001150/0069_01.jpg +n001150/0072_02.jpg +n001150/0117_01.jpg +n001150/0123_01.jpg +n001150/0127_01.jpg +n001150/0128_03.jpg +n001150/0187_01.jpg +n001150/0349_01.jpg +n001150/0439_01.jpg +n001150/0464_01.jpg +n001151/0152_01.jpg +n001151/0149_03.jpg +n001151/0222_01.jpg +n001152/0016_01.jpg +n001152/0017_01.jpg +n001152/0059_05.jpg +n001152/0068_01.jpg +n001152/0137_04.jpg +n001152/0169_03.jpg +n001152/0207_02.jpg +n001152/0218_01.jpg +n001152/0244_01.jpg +n001152/0281_02.jpg +n001152/0331_01.jpg +n001154/0109_01.jpg +n001155/0073_01.jpg +n001155/0112_02.jpg +n001155/0158_02.jpg 
+n001155/0270_01.jpg +n001155/0378_01.jpg +n001155/0444_01.jpg +n001155/0448_01.jpg +n001157/0055_01.jpg +n001158/0037_01.jpg +n001158/0109_02.jpg +n001158/0117_01.jpg +n001158/0156_01.jpg +n001158/0163_04.jpg +n001158/0172_01.jpg +n001158/0202_01.jpg +n001158/0220_01.jpg +n001158/0230_01.jpg +n001158/0232_01.jpg +n001158/0238_01.jpg +n001158/0244_01.jpg +n001159/0006_01.jpg +n001159/0037_01.jpg +n001159/0096_01.jpg +n001159/0179_01.jpg +n001159/0190_02.jpg +n001159/0267_01.jpg +n001159/0271_01.jpg +n001159/0358_01.jpg +n001159/0361_01.jpg +n001159/0363_01.jpg +n001159/0365_01.jpg +n001159/0381_04.jpg +n001159/0401_01.jpg +n001159/0443_02.jpg +n001159/0446_01.jpg +n001159/0474_01.jpg +n001159/0475_01.jpg +n001160/0016_01.jpg +n001160/0041_01.jpg +n001160/0052_01.jpg +n001161/0001_01.jpg +n001160/0053_01.jpg +n001160/0056_01.jpg +n001160/0057_01.jpg +n001160/0119_01.jpg +n001160/0123_02.jpg +n001160/0124_02.jpg +n001160/0124_03.jpg +n001160/0150_01.jpg +n001160/0150_01.jpg +n001160/0189_02.jpg +n001160/0395_01.jpg +n001160/0407_01.jpg +n001160/0413_02.jpg +n001160/0418_02.jpg +n001160/0419_02.jpg +n001160/0427_02.jpg +n001160/0430_02.jpg +n001161/0029_01.jpg +n001161/0035_01.jpg +n001161/0060_01.jpg +n001161/0126_01.jpg +n001161/0260_02.jpg +n001161/0282_01.jpg +n001161/0292_01.jpg +n001161/0310_03.jpg +n001161/0323_01.jpg +n001161/0446_01.jpg +n001161/0477_02.jpg +n001162/0026_01.jpg +n001162/0102_01.jpg +n001163/0202_01.jpg +n001163/0245_01.jpg +n001163/0267_01.jpg +n001163/0323_04.jpg +n001164/0005_01.jpg +n001164/0030_01.jpg +n001164/0067_01.jpg +n001164/0076_01.jpg +n001164/0131_01.jpg +n001164/0135_01.jpg +n001164/0152_02.jpg +n001164/0177_01.jpg +n001164/0212_01.jpg +n001164/0242_05.jpg +n001164/0254_02.jpg +n001164/0368_01.jpg +n001164/0433_01.jpg +n001164/0631_01.jpg +n001165/0063_01.jpg +n001165/0104_02.jpg +n001165/0141_03.jpg +n001165/0176_02.jpg +n001165/0185_01.jpg +n001165/0292_01.jpg +n001165/0298_01.jpg +n001165/0300_01.jpg +n001165/0302_01.jpg 
+n001165/0310_03.jpg +n001165/0336_01.jpg +n001165/0462_04.jpg +n001166/0462_01.jpg +n001167/0077_01.jpg +n001168/0041_01.jpg +n001168/0068_01.jpg +n001168/0323_01.jpg +n001168/0348_01.jpg +n001168/0350_01.jpg +n001169/0020_01.jpg +n001169/0028_01.jpg +n001169/0030_02.jpg +n001169/0137_01.jpg +n001169/0150_01.jpg +n001169/0200_01.jpg +n001169/0223_02.jpg +n001169/0276_01.jpg +n001169/0281_01.jpg +n001169/0290_02.jpg +n001169/0451_02.jpg +n001170/0068_01.jpg +n001170/0148_01.jpg +n001170/0249_01.jpg +n001170/0285_01.jpg +n001170/0403_01.jpg +n001170/0443_01.jpg +n001170/0458_01.jpg +n001170/0472_01.jpg +n001170/0481_02.jpg +n001170/0484_02.jpg +n001171/0206_01.jpg +n001172/0033_02.jpg +n001172/0031_01.jpg +n001172/0043_02.jpg +n001172/0048_01.jpg +n001172/0068_01.jpg +n001172/0100_01.jpg +n001172/0175_01.jpg +n001172/0185_01.jpg +n001172/0201_01.jpg +n001172/0212_01.jpg +n001172/0267_03.jpg +n001172/0279_01.jpg +n001172/0385_01.jpg +n001173/0073_04.jpg +n001173/0108_01.jpg +n001173/0170_01.jpg +n001173/0190_01.jpg +n001173/0337_02.jpg +n001175/0271_01.jpg +n001175/0273_02.jpg +n001175/0348_02.jpg +n001176/0381_01.jpg +n001177/0335_01.jpg +n001178/0035_01.jpg +n001178/0069_01.jpg +n001178/0119_01.jpg +n001178/0150_03.jpg +n001178/0170_04.jpg +n001178/0216_01.jpg +n001178/0292_01.jpg +n001178/0313_01.jpg +n001178/0313_02.jpg +n001178/0318_02.jpg +n001178/0338_02.jpg +n001178/0365_02.jpg +n001178/0377_02.jpg +n001178/0450_01.jpg +n001179/0035_02.jpg +n001179/0531_01.jpg +n001180/0007_01.jpg +n001180/0027_01.jpg +n001180/0033_01.jpg +n001180/0050_01.jpg +n001180/0069_01.jpg +n001180/0072_01.jpg +n001180/0101_02.jpg +n001180/0126_01.jpg +n001180/0142_01.jpg +n001180/0153_01.jpg +n001180/0161_01.jpg +n001180/0186_01.jpg +n001180/0220_01.jpg +n001180/0236_03.jpg +n001180/0249_01.jpg +n001180/0278_01.jpg +n001181/0123_01.jpg +n001181/0181_01.jpg +n001181/0235_01.jpg +n001181/0281_01.jpg +n001181/0290_02.jpg +n001181/0302_01.jpg +n001181/0309_01.jpg +n001181/0321_02.jpg 
+n001181/0368_02.jpg +n001181/0369_01.jpg +n001182/0020_04.jpg +n001182/0074_01.jpg +n001182/0094_02.jpg +n001182/0239_01.jpg +n001182/0262_01.jpg +n001182/0372_02.jpg +n001182/0404_03.jpg +n001183/0020_01.jpg +n001184/0038_01.jpg +n001184/0228_01.jpg +n001184/0324_01.jpg +n001184/0328_01.jpg +n001184/0358_01.jpg +n001185/0062_01.jpg +n001185/0752_01.jpg +n001186/0144_02.jpg +n001186/0364_01.jpg +n001187/0079_01.jpg +n001187/0084_01.jpg +n001187/0086_01.jpg +n001187/0207_01.jpg +n001187/0227_02.jpg +n001187/0228_01.jpg +n001187/0356_03.jpg +n001187/0394_01.jpg +n001187/0001_01.jpg +n001188/0027_01.jpg +n001188/0082_02.jpg +n001188/0128_03.jpg +n001188/0203_01.jpg +n001188/0237_01.jpg +n001188/0267_02.jpg +n001188/0291_02.jpg +n001188/0317_01.jpg +n001188/0353_01.jpg +n001188/0420_01.jpg +n001189/0004_01.jpg +n001189/0011_01.jpg +n001189/0088_01.jpg +n001189/0105_02.jpg +n001189/0127_01.jpg +n001189/0181_02.jpg +n001189/0287_02.jpg +n001189/0289_01.jpg +n001189/0297_02.jpg +n001189/0356_01.jpg +n001189/0426_02.jpg +n001191/0110_01.jpg +n001191/0282_01.jpg +n001192/0055_01.jpg +n001192/0174_01.jpg +n001192/0233_02.jpg +n001192/0259_01.jpg +n001192/0274_01.jpg +n001193/0100_02.jpg +n001193/0239_01.jpg +n001194/0068_01.jpg +n001194/0145_02.jpg +n001194/0200_01.jpg +n001194/0331_01.jpg +n001194/0351_01.jpg +n001194/0359_01.jpg +n001195/0121_01.jpg +n001195/0293_01.jpg +n001196/0046_01.jpg +n001196/0046_02.jpg +n001196/0075_02.jpg +n001196/0102_01.jpg +n001196/0114_01.jpg +n001196/0120_01.jpg +n001196/0218_03.jpg +n001198/0075_02.jpg +n001198/0218_01.jpg +n001198/0350_01.jpg +n001198/0403_01.jpg +n001198/0492_01.jpg +n001198/0492_02.jpg +n001198/0497_01.jpg +n001198/0499_01.jpg +n001198/0534_01.jpg +n001198/0551_01.jpg +n001198/0551_02.jpg +n001200/0095_01.jpg +n001200/0107_01.jpg +n001200/0122_01.jpg +n001200/0170_01.jpg +n001200/0212_01.jpg +n001200/0236_01.jpg +n001200/0248_01.jpg +n001200/0262_02.jpg +n001200/0310_01.jpg +n001200/0358_01.jpg +n001200/0429_01.jpg 
+n001200/0439_03.jpg +n001200/0443_03.jpg +n001200/0454_01.jpg +n001200/0488_01.jpg +n001200/0546_02.jpg +n001200/0552_02.jpg +n001200/0569_01.jpg +n001200/0571_01.jpg +n001200/0581_02.jpg +n001200/0585_01.jpg +n001201/0013_01.jpg +n001201/0053_01.jpg +n001201/0087_01.jpg +n001201/0113_01.jpg +n001201/0123_01.jpg +n001201/0154_01.jpg +n001201/0151_01.jpg +n001201/0257_01.jpg +n001201/0364_01.jpg +n001203/0009_01.jpg +n001203/0011_02.jpg +n001203/0073_01.jpg +n001203/0076_02.jpg +n001203/0083_03.jpg +n001203/0109_04.jpg +n001203/0119_02.jpg +n001203/0148_01.jpg +n001203/0170_01.jpg +n001203/0236_02.jpg +n001203/0423_01.jpg +n001204/0044_01.jpg +n001204/0091_01.jpg +n001204/0111_02.jpg +n001204/0153_01.jpg +n001204/0204_02.jpg +n001204/0219_01.jpg +n001204/0247_01.jpg +n001204/0403_02.jpg +n001204/0417_02.jpg +n001204/0421_01.jpg +n001204/0529_02.jpg +n001204/0601_01.jpg +n001205/0143_01.jpg +n001205/0215_01.jpg +n001206/0274_01.jpg +n001206/0349_01.jpg +n001207/0006_01.jpg +n001208/0071_02.jpg +n001208/0112_01.jpg +n001208/0113_01.jpg +n001208/0121_01.jpg +n001208/0123_01.jpg +n001208/0131_03.jpg +n001208/0455_01.jpg +n001209/0031_01.jpg +n001209/0097_01.jpg +n001209/0313_02.jpg +n001210/0038_01.jpg +n001210/0205_02.jpg +n001210/0213_01.jpg +n001210/0213_02.jpg +n001210/0226_02.jpg +n001210/0226_01.jpg +n001210/0319_01.jpg +n001210/0319_02.jpg +n001210/0329_02.jpg +n001212/0014_02.jpg +n001212/0035_01.jpg +n001212/0056_01.jpg +n001212/0092_01.jpg +n001212/0166_01.jpg +n001212/0178_01.jpg +n001212/0226_02.jpg +n001212/0246_01.jpg +n001212/0257_01.jpg +n001212/0276_01.jpg +n001212/0317_01.jpg +n001213/0025_02.jpg +n001213/0041_01.jpg +n001213/0092_02.jpg +n001213/0126_02.jpg +n001213/0134_02.jpg +n001213/0141_01.jpg +n001213/0196_01.jpg +n001213/0255_01.jpg +n001213/0423_01.jpg +n001214/0014_01.jpg +n001214/0044_01.jpg +n001215/0003_01.jpg +n001215/0008_02.jpg +n001215/0045_01.jpg +n001215/0090_01.jpg +n001215/0100_01.jpg +n001215/0126_01.jpg +n001216/0001_01.jpg 
+n001216/0007_01.jpg +n001216/0025_01.jpg +n001216/0040_14.jpg +n001216/0045_01.jpg +n001216/0127_02.jpg +n001216/0192_01.jpg +n001216/0247_01.jpg +n001217/0048_01.jpg +n001217/0122_01.jpg +n001217/0454_01.jpg +n001217/0459_01.jpg +n001218/0003_04.jpg +n001218/0006_04.jpg +n001218/0023_01.jpg +n001218/0089_01.jpg +n001218/0106_03.jpg +n001218/0116_04.jpg +n001218/0218_02.jpg +n001218/0229_01.jpg +n001218/0273_03.jpg +n001218/0283_01.jpg +n001218/0287_01.jpg +n001218/0327_01.jpg +n001218/0364_02.jpg +n001218/0374_02.jpg +n001218/0420_01.jpg +n001218/0424_02.jpg +n001218/0462_02.jpg +n001219/0025_01.jpg +n001219/0068_01.jpg +n001219/0136_02.jpg +n001219/0141_01.jpg +n001219/0141_03.jpg +n001219/0211_01.jpg +n001219/0211_02.jpg +n001220/0003_01.jpg +n001220/0074_01.jpg +n001220/0119_01.jpg +n001220/0120_01.jpg +n001220/0202_01.jpg +n001220/0208_02.jpg +n001220/0304_01.jpg +n001220/0328_01.jpg +n001220/0350_01.jpg +n001220/0364_01.jpg +n001220/0367_01.jpg +n001220/0368_01.jpg +n001221/0170_01.jpg +n001221/0203_01.jpg +n001221/0252_01.jpg +n001221/0255_01.jpg +n001221/0373_01.jpg +n001221/0494_02.jpg +n001221/0533_01.jpg +n001222/0082_01.jpg +n001222/0138_01.jpg +n001222/0333_01.jpg +n001222/0454_01.jpg +n001223/0039_01.jpg +n001223/0035_01.jpg +n001223/0042_01.jpg +n001223/0042_02.jpg +n001223/0076_01.jpg +n001223/0142_02.jpg +n001223/0217_02.jpg +n001223/0277_01.jpg +n001223/0279_01.jpg +n001223/0323_01.jpg +n001223/0407_01.jpg +n001223/0413_02.jpg +n001223/0429_01.jpg +n001224/0013_02.jpg +n001224/0063_01.jpg +n001224/0199_02.jpg +n001224/0222_02.jpg +n001224/0303_01.jpg +n001224/0396_02.jpg +n001224/0414_02.jpg +n001224/0428_01.jpg +n001224/0452_01.jpg +n001224/0459_03.jpg +n001224/0499_01.jpg +n001225/0073_01.jpg +n001225/0354_01.jpg +n001225/0364_01.jpg +n001225/0388_01.jpg +n001225/0451_01.jpg +n001225/0451_02.jpg +n001225/0483_02.jpg +n001225/0559_01.jpg +n001226/0090_01.jpg +n001226/0128_02.jpg +n001226/0145_05.jpg +n001226/0182_02.jpg +n001226/0216_01.jpg 
+n001226/0430_01.jpg +n001226/0443_01.jpg +n001226/0533_01.jpg +n001227/0014_01.jpg +n001227/0014_04.jpg +n001227/0021_02.jpg +n001227/0033_01.jpg +n001227/0126_02.jpg +n001227/0167_02.jpg +n001227/0179_01.jpg +n001227/0200_02.jpg +n001227/0203_03.jpg +n001227/0203_04.jpg +n001227/0232_01.jpg +n001227/0236_02.jpg +n001227/0239_01.jpg +n001227/0250_02.jpg +n001227/0330_02.jpg +n001227/0345_01.jpg +n001227/0424_01.jpg +n001227/0476_02.jpg +n001228/0004_02.jpg +n001228/0013_01.jpg +n001228/0218_01.jpg +n001228/0401_01.jpg +n001228/0417_01.jpg +n001229/0019_02.jpg +n001229/0038_01.jpg +n001229/0117_01.jpg +n001229/0162_02.jpg +n001229/0213_01.jpg +n001229/0216_01.jpg +n001229/0275_02.jpg +n001229/0299_02.jpg +n001230/0001_04.jpg +n001230/0005_01.jpg +n001230/0016_01.jpg +n001230/0018_02.jpg +n001230/0021_01.jpg +n001230/0023_01.jpg +n001230/0030_01.jpg +n001230/0045_02.jpg +n001230/0048_02.jpg +n001230/0048_05.jpg +n001230/0075_01.jpg +n001230/0080_02.jpg +n001230/0088_02.jpg +n001230/0120_01.jpg +n001230/0265_01.jpg +n001230/0365_01.jpg +n001230/0365_03.jpg +n001230/0415_02.jpg +n001231/0015_01.jpg +n001231/0034_02.jpg +n001231/0125_01.jpg +n001231/0144_01.jpg +n001231/0162_02.jpg +n001231/0159_02.jpg +n001231/0166_01.jpg +n001231/0168_01.jpg +n001231/0173_01.jpg +n001231/0183_01.jpg +n001231/0184_01.jpg +n001231/0210_01.jpg +n001231/0266_01.jpg +n001231/0277_01.jpg +n001231/0290_01.jpg +n001232/0037_01.jpg +n001232/0065_02.jpg +n001232/0072_02.jpg +n001232/0100_01.jpg +n001232/0150_02.jpg +n001232/0257_01.jpg +n001232/0345_01.jpg +n001233/0184_01.jpg +n001233/0217_01.jpg +n001234/0018_01.jpg +n001234/0236_01.jpg +n001234/0450_02.jpg +n001234/0469_02.jpg +n001235/0064_02.jpg +n001235/0162_01.jpg +n001235/0199_01.jpg +n001235/0238_01.jpg +n001235/0342_01.jpg +n001235/0404_01.jpg +n001235/0446_02.jpg +n001236/0004_01.jpg +n001236/0041_02.jpg +n001236/0050_02.jpg +n001236/0073_01.jpg +n001236/0084_01.jpg +n001236/0089_01.jpg +n001236/0092_02.jpg +n001236/0100_01.jpg 
+n001236/0120_01.jpg +n001236/0139_01.jpg +n001236/0143_04.jpg +n001236/0154_01.jpg +n001236/0193_01.jpg +n001236/0255_01.jpg +n001236/0285_01.jpg +n001236/0291_01.jpg +n001236/0304_01.jpg +n001236/0343_02.jpg +n001236/0347_01.jpg +n001236/0348_01.jpg +n001236/0358_01.jpg +n001236/0363_01.jpg +n001236/0363_02.jpg +n001236/0370_02.jpg +n001236/0407_01.jpg +n001237/0110_02.jpg +n001237/0312_01.jpg +n001238/0124_01.jpg +n001238/0186_01.jpg +n001238/0286_01.jpg +n001238/0324_02.jpg +n001238/0340_01.jpg +n001240/0040_01.jpg +n001240/0046_02.jpg +n001240/0192_01.jpg +n001240/0192_02.jpg +n001240/0196_01.jpg +n001240/0256_01.jpg +n001241/0034_01.jpg +n001241/0195_01.jpg +n001241/0210_01.jpg +n001241/0261_01.jpg +n001241/0260_02.jpg +n001241/0318_02.jpg +n001241/0341_01.jpg +n001241/0386_02.jpg +n001241/0399_01.jpg +n001241/0576_02.jpg +n001243/0176_01.jpg +n001244/0337_01.jpg +n001245/0024_01.jpg +n001245/0064_01.jpg +n001245/0090_05.jpg +n001245/0199_01.jpg +n001245/0244_01.jpg +n001245/0250_01.jpg +n001245/0282_01.jpg +n001246/0057_01.jpg +n001246/0246_02.jpg +n001246/0258_01.jpg +n001246/0286_01.jpg +n001246/0334_01.jpg +n001246/0354_01.jpg +n001246/0364_01.jpg +n001246/0563_01.jpg +n001246/0566_01.jpg +n001246/0579_01.jpg +n001247/0005_01.jpg +n001247/0073_01.jpg +n001247/0111_01.jpg +n001247/0123_01.jpg +n001247/0146_01.jpg +n001247/0265_01.jpg +n001247/0424_01.jpg +n001248/0011_01.jpg +n001248/0024_04.jpg +n001248/0090_01.jpg +n001248/0192_01.jpg +n001248/0223_01.jpg +n001248/0251_02.jpg +n001248/0407_01.jpg +n001249/0233_02.jpg +n001249/0291_01.jpg +n001249/0345_01.jpg +n001250/0008_02.jpg +n001250/0043_01.jpg +n001251/0135_01.jpg +n001251/0138_02.jpg +n001251/0211_01.jpg +n001251/0542_01.jpg +n001252/0004_01.jpg +n001252/0038_01.jpg +n001252/0116_01.jpg +n001253/0116_01.jpg +n001253/0459_03.jpg +n001254/0051_01.jpg +n001254/0134_01.jpg +n001254/0204_01.jpg +n001254/0248_01.jpg +n001255/0064_02.jpg +n001255/0149_01.jpg +n001255/0169_01.jpg +n001255/0273_01.jpg 
+n001257/0274_02.jpg +n001258/0151_01.jpg +n001258/0173_02.jpg +n001258/0228_01.jpg +n001259/0098_01.jpg +n001259/0106_01.jpg +n001260/0252_01.jpg +n001260/0391_01.jpg +n001261/0082_01.jpg +n001261/0113_01.jpg +n001261/0128_01.jpg +n001261/0273_01.jpg +n001262/0064_01.jpg +n001262/0101_01.jpg +n001262/0102_01.jpg +n001262/0112_01.jpg +n001262/0122_01.jpg +n001262/0159_01.jpg +n001262/0154_01.jpg +n001262/0163_06.jpg +n001262/0163_09.jpg +n001262/0197_01.jpg +n001262/0202_03.jpg +n001262/0205_01.jpg +n001262/0249_01.jpg +n001262/0286_01.jpg +n001262/0300_01.jpg +n001262/0322_01.jpg +n001262/0331_02.jpg +n001262/0348_01.jpg +n001263/0033_01.jpg +n001263/0104_03.jpg +n001263/0179_01.jpg +n001263/0229_01.jpg +n001263/0266_01.jpg +n001263/0363_01.jpg +n001263/0432_02.jpg +n001263/0434_01.jpg +n001263/0472_02.jpg +n001263/0504_02.jpg +n001264/0112_01.jpg +n001264/0134_07.jpg +n001264/0207_03.jpg +n001264/0508_01.jpg +n001265/0063_01.jpg +n001265/0101_01.jpg +n001265/0165_01.jpg +n001265/0173_02.jpg +n001265/0228_02.jpg +n001266/0008_01.jpg +n001266/0010_02.jpg +n001266/0034_01.jpg +n001266/0114_01.jpg +n001266/0127_01.jpg +n001266/0132_02.jpg +n001266/0142_02.jpg +n001266/0163_01.jpg +n001266/0261_01.jpg +n001267/0107_01.jpg +n001268/0002_01.jpg +n001268/0010_01.jpg +n001268/0159_01.jpg +n001268/0180_01.jpg +n001268/0261_01.jpg +n001268/0282_01.jpg +n001268/0291_01.jpg +n001268/0294_01.jpg +n001268/0295_01.jpg +n001268/0311_03.jpg +n001268/0358_01.jpg +n001269/0033_02.jpg +n001269/0064_02.jpg +n001269/0158_02.jpg +n001269/0192_01.jpg +n001269/0250_02.jpg +n001269/0262_01.jpg +n001269/0276_01.jpg +n001269/0348_02.jpg +n001269/0349_01.jpg +n001269/0362_01.jpg +n001270/0051_01.jpg +n001270/0173_01.jpg +n001271/0066_01.jpg +n001271/0070_01.jpg +n001272/0001_01.jpg +n001272/0003_01.jpg +n001272/0015_01.jpg +n001272/0020_01.jpg +n001272/0037_03.jpg +n001272/0082_02.jpg +n001272/0150_01.jpg +n001272/0209_02.jpg +n001272/0223_01.jpg +n001272/0239_01.jpg +n001272/0246_01.jpg 
+n001272/0250_01.jpg +n001272/0307_01.jpg +n001272/0389_01.jpg +n001273/0022_02.jpg +n001273/0049_01.jpg +n001273/0084_01.jpg +n001273/0107_01.jpg +n001273/0116_02.jpg +n001273/0150_02.jpg +n001275/0144_02.jpg +n001275/0220_02.jpg +n001275/0246_01.jpg +n001276/0199_02.jpg +n001276/0255_01.jpg +n001276/0255_02.jpg +n001278/0025_01.jpg +n001278/0046_01.jpg +n001278/0073_01.jpg +n001278/0170_01.jpg +n001278/0170_02.jpg +n001278/0234_01.jpg +n001278/0235_01.jpg +n001278/0359_02.jpg +n001279/0033_02.jpg +n001279/0039_02.jpg +n001279/0167_01.jpg +n001280/0127_01.jpg +n001281/0054_01.jpg +n001281/0180_01.jpg +n001281/0242_01.jpg +n001281/0243_01.jpg +n001281/0243_02.jpg +n001281/0243_04.jpg +n001281/0243_05.jpg +n001281/0243_06.jpg +n001281/0267_01.jpg +n001281/0284_01.jpg +n001281/0372_01.jpg +n001281/0374_01.jpg +n001281/0433_02.jpg +n001281/0467_02.jpg +n001282/0023_02.jpg +n001282/0099_01.jpg +n001282/0107_01.jpg +n001282/0141_01.jpg +n001282/0187_02.jpg +n001282/0203_01.jpg +n001283/0072_01.jpg +n001283/0084_01.jpg +n001283/0095_01.jpg +n001283/0109_01.jpg +n001283/0127_01.jpg +n001283/0195_01.jpg +n001283/0219_01.jpg +n001285/0017_01.jpg +n001285/0111_01.jpg +n001285/0229_01.jpg +n001285/0304_02.jpg +n001285/0372_01.jpg +n001285/0373_01.jpg +n001285/0374_01.jpg +n001285/0419_02.jpg +n001285/0421_01.jpg +n001285/0500_01.jpg +n001285/0499_01.jpg +n001285/0516_01.jpg +n001286/0041_01.jpg +n001286/0043_08.jpg +n001286/0053_03.jpg +n001286/0120_01.jpg +n001286/0125_01.jpg +n001286/0258_01.jpg +n001287/0058_01.jpg +n001287/0058_02.jpg +n001287/0073_01.jpg +n001287/0093_02.jpg +n001287/0114_01.jpg +n001287/0117_02.jpg +n001287/0126_01.jpg +n001287/0149_01.jpg +n001287/0154_01.jpg +n001287/0171_01.jpg +n001287/0268_03.jpg +n001287/0323_01.jpg +n001287/0325_01.jpg +n001287/0343_02.jpg +n001287/0365_02.jpg +n001287/0370_01.jpg +n001287/0376_01.jpg +n001287/0393_01.jpg +n001287/0397_02.jpg +n001287/0411_01.jpg +n001288/0033_02.jpg +n001288/0135_02.jpg +n001288/0250_02.jpg 
+n001288/0380_01.jpg +n001288/0406_02.jpg +n001289/0029_01.jpg +n001289/0075_01.jpg +n001289/0080_02.jpg +n001289/0184_03.jpg +n001289/0236_01.jpg +n001289/0262_02.jpg +n001289/0299_02.jpg +n001289/0334_01.jpg +n001290/0202_02.jpg +n001290/0342_02.jpg +n001292/0056_01.jpg +n001292/0129_01.jpg +n001292/0153_01.jpg +n001292/0172_03.jpg +n001292/0173_01.jpg +n001292/0197_02.jpg +n001292/0233_01.jpg +n001292/0231_01.jpg +n001292/0284_01.jpg +n001292/0332_01.jpg +n001294/0041_02.jpg +n001294/0171_02.jpg +n001294/0193_01.jpg +n001294/0270_01.jpg +n001294/0323_01.jpg +n001294/0354_01.jpg +n001294/0351_02.jpg +n001294/0359_02.jpg +n001294/0363_02.jpg +n001294/0391_02.jpg +n001294/0392_01.jpg +n001294/0424_01.jpg +n001295/0058_02.jpg +n001295/0185_01.jpg +n001295/0188_01.jpg +n001295/0191_01.jpg +n001295/0257_01.jpg +n001295/0264_01.jpg +n001295/0265_01.jpg +n001298/0001_01.jpg +n001298/0218_01.jpg +n001298/0228_01.jpg +n001298/0249_01.jpg +n001298/0266_01.jpg +n001298/0317_01.jpg +n001298/0342_02.jpg +n001298/0364_01.jpg +n001298/0407_01.jpg +n001300/0023_01.jpg +n001300/0053_01.jpg +n001300/0056_01.jpg +n001300/0223_01.jpg +n001301/0121_01.jpg +n001301/0183_02.jpg +n001301/0382_03.jpg +n001305/0027_02.jpg +n001305/0052_01.jpg +n001305/0058_03.jpg +n001305/0129_01.jpg +n001305/0195_01.jpg +n001305/0211_01.jpg +n001305/0215_01.jpg +n001305/0224_01.jpg +n001305/0232_01.jpg +n001305/0240_01.jpg +n001305/0262_01.jpg +n001305/0285_01.jpg +n001305/0285_02.jpg +n001305/0316_01.jpg +n001306/0104_01.jpg +n001307/0035_01.jpg +n001307/0078_01.jpg +n001307/0219_01.jpg +n001307/0234_01.jpg +n001308/0004_01.jpg +n001308/0074_01.jpg +n001308/0077_01.jpg +n001308/0085_01.jpg +n001308/0140_01.jpg +n001308/0261_02.jpg +n001308/0268_02.jpg +n001308/0544_01.jpg +n001308/0544_02.jpg +n001309/0016_01.jpg +n001309/0018_02.jpg +n001309/0043_01.jpg +n001309/0177_01.jpg +n001309/0180_01.jpg +n001309/0188_02.jpg +n001309/0213_01.jpg +n001309/0266_01.jpg +n001309/0286_01.jpg +n001309/0286_02.jpg 
+n001309/0293_01.jpg +n001309/0294_01.jpg +n001309/0319_02.jpg +n001309/0327_01.jpg +n001309/0404_01.jpg +n001309/0422_01.jpg +n001310/0052_01.jpg +n001310/0060_01.jpg +n001310/0140_02.jpg +n001310/0205_02.jpg +n001310/0208_01.jpg +n001310/0246_01.jpg +n001310/0251_02.jpg +n001310/0279_01.jpg +n001311/0130_01.jpg +n001311/0159_01.jpg +n001311/0178_01.jpg +n001311/0220_02.jpg +n001311/0221_01.jpg +n001311/0224_01.jpg +n001311/0224_02.jpg +n001311/0246_01.jpg +n001311/0262_02.jpg +n001311/0266_04.jpg +n001311/0292_01.jpg +n001311/0297_02.jpg +n001311/0333_01.jpg +n001311/0336_01.jpg +n001311/0343_01.jpg +n001311/0347_01.jpg +n001311/0375_02.jpg +n001311/0435_02.jpg +n001312/0037_01.jpg +n001312/0044_01.jpg +n001312/0064_01.jpg +n001312/0094_01.jpg +n001312/0107_02.jpg +n001312/0314_01.jpg +n001312/0589_01.jpg +n001313/0019_01.jpg +n001313/0025_01.jpg +n001313/0052_01.jpg +n001313/0059_01.jpg +n001313/0060_01.jpg +n001313/0174_02.jpg +n001313/0175_01.jpg +n001313/0197_01.jpg +n001313/0203_01.jpg +n001313/0221_01.jpg +n001313/0263_01.jpg +n001313/0321_01.jpg +n001313/0378_05.jpg +n001314/0164_01.jpg +n001314/0213_01.jpg +n001314/0328_01.jpg +n001314/0335_01.jpg +n001314/0360_01.jpg +n001315/0079_01.jpg +n001315/0079_02.jpg +n001315/0190_01.jpg +n001315/0260_02.jpg +n001315/0269_02.jpg +n001315/0373_01.jpg +n001315/0385_01.jpg +n001315/0549_01.jpg +n001315/0612_01.jpg +n001315/0618_01.jpg +n001316/0002_02.jpg +n001316/0095_02.jpg +n001316/0177_02.jpg +n001316/0304_01.jpg +n001316/0430_05.jpg +n001316/0603_01.jpg +n001316/0610_02.jpg +n001317/0003_01.jpg +n001317/0078_01.jpg +n001317/0088_01.jpg +n001319/0001_02.jpg +n001319/0076_01.jpg +n001319/0192_03.jpg +n001320/0087_01.jpg +n001320/0103_01.jpg +n001320/0168_01.jpg +n001320/0260_01.jpg +n001320/0300_01.jpg +n001320/0375_01.jpg +n001321/0002_02.jpg +n001321/0066_01.jpg +n001321/0117_01.jpg +n001321/0153_02.jpg +n001321/0154_02.jpg +n001321/0159_01.jpg +n001321/0165_01.jpg +n001321/0213_01.jpg +n001321/0224_02.jpg 
+n001321/0436_02.jpg +n001322/0021_02.jpg +n001322/0047_01.jpg +n001322/0127_02.jpg +n001322/0317_02.jpg +n001322/0388_02.jpg +n001322/0509_03.jpg +n001322/0640_01.jpg +n001323/0004_02.jpg +n001323/0283_01.jpg +n001323/0283_02.jpg +n001325/0064_01.jpg +n001325/0066_01.jpg +n001325/0203_02.jpg +n001325/0212_01.jpg +n001326/0028_01.jpg +n001326/0070_02.jpg +n001326/0072_03.jpg +n001326/0096_01.jpg +n001326/0132_01.jpg +n001326/0131_02.jpg +n001326/0324_02.jpg +n001327/0050_01.jpg +n001327/0064_03.jpg +n001327/0069_03.jpg +n001327/0069_04.jpg +n001327/0069_05.jpg +n001327/0099_01.jpg +n001327/0124_02.jpg +n001327/0150_01.jpg +n001327/0163_01.jpg +n001327/0172_01.jpg +n001327/0314_01.jpg +n001327/0335_01.jpg +n001328/0059_01.jpg +n001328/0090_01.jpg +n001328/0100_01.jpg +n001328/0152_01.jpg +n001328/0168_01.jpg +n001328/0256_01.jpg +n001328/0278_01.jpg +n001328/0313_01.jpg +n001328/0310_01.jpg +n001329/0074_01.jpg +n001329/0109_01.jpg +n001329/0135_02.jpg +n001329/0143_01.jpg +n001329/0160_01.jpg +n001329/0181_01.jpg +n001329/0259_02.jpg +n001329/0282_01.jpg +n001329/0292_01.jpg +n001329/0338_01.jpg +n001329/0345_01.jpg +n001329/0354_01.jpg +n001329/0392_01.jpg +n001330/0031_01.jpg +n001330/0037_01.jpg +n001330/0052_02.jpg +n001330/0107_02.jpg +n001330/0196_03.jpg +n001331/0088_01.jpg +n001331/0094_01.jpg +n001331/0126_03.jpg +n001331/0131_01.jpg +n001331/0138_01.jpg +n001331/0321_02.jpg +n001331/0325_01.jpg +n001331/0330_02.jpg +n001331/0335_01.jpg +n001331/0336_02.jpg +n001332/0046_01.jpg +n001332/0050_01.jpg +n001332/0085_01.jpg +n001332/0155_02.jpg +n001332/0242_01.jpg +n001332/0290_02.jpg +n001332/0305_01.jpg +n001332/0319_02.jpg +n001333/0065_01.jpg +n001333/0160_01.jpg +n001333/0245_01.jpg +n001333/0323_01.jpg +n001333/0336_01.jpg +n001333/0343_01.jpg +n001333/0433_01.jpg +n001333/0613_01.jpg +n001333/0619_01.jpg +n001334/0019_01.jpg +n001334/0072_02.jpg +n001334/0088_01.jpg +n001334/0099_01.jpg +n001334/0167_01.jpg +n001334/0202_03.jpg +n001334/0307_01.jpg 
+n001334/0567_02.jpg +n001335/0040_01.jpg +n001335/0164_01.jpg +n001335/0182_01.jpg +n001335/0188_02.jpg +n001335/0250_02.jpg +n001335/0279_01.jpg +n001335/0296_01.jpg +n001335/0377_01.jpg +n001336/0176_01.jpg +n001338/0132_01.jpg +n001338/0143_01.jpg +n001338/0179_01.jpg +n001339/0003_02.jpg +n001339/0009_02.jpg +n001339/0080_01.jpg +n001339/0085_02.jpg +n001339/0105_01.jpg +n001339/0108_01.jpg +n001339/0139_01.jpg +n001339/0141_01.jpg +n001339/0141_02.jpg +n001339/0143_01.jpg +n001339/0184_04.jpg +n001339/0193_01.jpg +n001339/0237_01.jpg +n001339/0263_02.jpg +n001339/0361_02.jpg +n001339/0433_01.jpg +n001339/0436_01.jpg +n001339/0442_01.jpg +n001339/0448_02.jpg +n001339/0459_02.jpg +n001339/0464_01.jpg +n001339/0465_01.jpg +n001339/0467_01.jpg +n001339/0467_02.jpg +n001340/0207_01.jpg +n001340/0224_01.jpg +n001342/0091_01.jpg +n001342/0281_01.jpg +n001343/0090_02.jpg +n001343/0153_01.jpg +n001343/0200_01.jpg +n001343/0207_02.jpg +n001343/0285_04.jpg +n001343/0398_01.jpg +n001344/0046_01.jpg +n001344/0075_01.jpg +n001344/0097_01.jpg +n001344/0111_01.jpg +n001344/0213_03.jpg +n001344/0235_01.jpg +n001344/0279_01.jpg +n001344/0287_03.jpg +n001344/0318_01.jpg +n001344/0367_01.jpg +n001344/0450_01.jpg +n001344/0469_01.jpg +n001344/0469_02.jpg +n001344/0482_01.jpg +n001345/0068_01.jpg +n001345/0126_01.jpg +n001345/0279_01.jpg +n001345/0290_01.jpg +n001345/0297_02.jpg +n001345/0332_02.jpg +n001345/0390_01.jpg +n001346/0072_01.jpg +n001346/0111_01.jpg +n001346/0114_01.jpg +n001346/0160_03.jpg +n001346/0239_01.jpg +n001346/0248_01.jpg +n001346/0341_01.jpg +n001347/0086_01.jpg +n001347/0086_02.jpg +n001348/0122_01.jpg +n001348/0166_01.jpg +n001348/0165_01.jpg +n001348/0297_01.jpg +n001348/0415_02.jpg +n001348/0422_01.jpg +n001348/0434_02.jpg +n001348/0476_01.jpg +n001349/0035_01.jpg +n001349/0153_01.jpg +n001349/0170_02.jpg +n001349/0303_02.jpg +n001349/0308_02.jpg +n001349/0328_01.jpg +n001349/0425_01.jpg +n001351/0050_01.jpg +n001351/0050_02.jpg +n001351/0132_03.jpg 
+n001351/0144_01.jpg +n001351/0168_05.jpg +n001351/0168_08.jpg +n001351/0168_10.jpg +n001351/0200_01.jpg +n001351/0271_01.jpg +n001351/0271_02.jpg +n001351/0279_02.jpg +n001351/0325_02.jpg +n001351/0325_01.jpg +n001352/0064_01.jpg +n001352/0099_03.jpg +n001352/0128_01.jpg +n001352/0167_01.jpg +n001352/0177_01.jpg +n001352/0193_01.jpg +n001352/0203_01.jpg +n001352/0216_03.jpg +n001352/0240_01.jpg +n001352/0336_01.jpg +n001352/0360_02.jpg +n001352/0365_02.jpg +n001352/0409_03.jpg +n001352/0412_01.jpg +n001352/0514_02.jpg +n001352/0561_01.jpg +n001352/0580_02.jpg +n001352/0597_01.jpg +n001352/0597_02.jpg +n001353/0015_01.jpg +n001353/0038_01.jpg +n001354/0038_03.jpg +n001354/0100_01.jpg +n001354/0108_02.jpg +n001354/0237_01.jpg +n001354/0254_01.jpg +n001354/0296_02.jpg +n001354/0299_02.jpg +n001354/0322_02.jpg +n001354/0327_01.jpg +n001354/0340_01.jpg +n001354/0342_02.jpg +n001354/0371_01.jpg +n001354/0371_02.jpg +n001354/0372_01.jpg +n001354/0406_01.jpg +n001354/0789_03.jpg +n001355/0112_01.jpg +n001355/0141_01.jpg +n001355/0167_01.jpg +n001355/0168_01.jpg +n001355/0173_02.jpg +n001355/0198_03.jpg +n001355/0206_01.jpg +n001355/0240_03.jpg +n001355/0255_02.jpg +n001355/0324_03.jpg +n001355/0496_02.jpg +n001355/0515_02.jpg +n001356/0052_01.jpg +n001356/0118_01.jpg +n001356/0143_03.jpg +n001356/0164_01.jpg +n001356/0351_01.jpg +n001356/0357_01.jpg +n001357/0294_02.jpg +n001358/0024_02.jpg +n001358/0040_01.jpg +n001358/0044_03.jpg +n001358/0054_01.jpg +n001358/0148_02.jpg +n001358/0150_01.jpg +n001358/0154_03.jpg +n001358/0261_03.jpg +n001358/0291_01.jpg +n001359/0022_02.jpg +n001359/0053_01.jpg +n001359/0054_01.jpg +n001359/0062_03.jpg +n001359/0126_02.jpg +n001359/0189_02.jpg +n001359/0197_03.jpg +n001359/0275_01.jpg +n001359/0277_01.jpg +n001359/0354_01.jpg +n001359/0469_02.jpg +n001359/0509_01.jpg +n001359/0530_02.jpg +n001359/0548_02.jpg +n001360/0058_02.jpg +n001360/0106_02.jpg +n001360/0117_01.jpg +n001360/0410_01.jpg +n001361/0131_01.jpg +n001362/0155_01.jpg 
+n001362/0170_01.jpg +n001362/0179_02.jpg +n001362/0193_01.jpg +n001363/0062_02.jpg +n001364/0064_01.jpg +n001364/0108_01.jpg +n001364/0183_01.jpg +n001364/0245_01.jpg +n001364/0415_01.jpg +n001365/0252_01.jpg +n001365/0273_01.jpg +n001365/0429_01.jpg +n001365/0464_03.jpg +n001366/0001_01.jpg +n001366/0087_01.jpg +n001366/0600_02.jpg +n001367/0002_01.jpg +n001367/0179_01.jpg +n001367/0301_01.jpg +n001367/0428_02.jpg +n001367/0457_01.jpg +n001367/0494_01.jpg +n001367/0563_01.jpg +n001369/0013_02.jpg +n001369/0014_01.jpg +n001369/0015_01.jpg +n001369/0017_01.jpg +n001369/0028_01.jpg +n001369/0030_02.jpg +n001369/0072_02.jpg +n001369/0123_01.jpg +n001369/0135_01.jpg +n001369/0146_02.jpg +n001369/0166_02.jpg +n001369/0192_02.jpg +n001369/0197_01.jpg +n001369/0198_01.jpg +n001369/0203_01.jpg +n001369/0206_01.jpg +n001369/0206_03.jpg +n001369/0251_02.jpg +n001369/0270_01.jpg +n001369/0277_01.jpg +n001369/0295_01.jpg +n001369/0305_03.jpg +n001369/0320_02.jpg +n001369/0335_01.jpg +n001369/0353_01.jpg +n001369/0363_01.jpg +n001369/0377_01.jpg +n001369/0384_01.jpg +n001369/0389_01.jpg +n001369/0444_01.jpg +n001369/0473_05.jpg +n001369/0501_01.jpg +n001369/0504_02.jpg +n001369/0535_03.jpg +n001369/0542_01.jpg +n001369/0554_01.jpg +n001369/0589_01.jpg +n001370/0088_02.jpg +n001370/0127_02.jpg +n001370/0182_02.jpg +n001370/0203_02.jpg +n001370/0261_02.jpg +n001370/0266_02.jpg +n001370/0311_01.jpg +n001370/0319_01.jpg +n001370/0340_01.jpg +n001370/0363_01.jpg +n001370/0487_03.jpg +n001371/0109_05.jpg +n001371/0135_02.jpg +n001371/0245_08.jpg +n001371/0306_02.jpg +n001372/0109_02.jpg +n001372/0138_01.jpg +n001372/0164_03.jpg +n001372/0218_03.jpg +n001372/0236_02.jpg +n001372/0244_01.jpg +n001372/0282_01.jpg +n001372/0308_02.jpg +n001372/0324_01.jpg +n001372/0341_01.jpg +n001372/0375_01.jpg +n001372/0383_01.jpg +n001373/0161_01.jpg +n001373/0165_01.jpg +n001373/0236_01.jpg +n001373/0241_01.jpg +n001373/0374_02.jpg +n001374/0127_01.jpg +n001374/0200_01.jpg +n001374/0211_01.jpg 
+n001374/0242_01.jpg +n001374/0271_01.jpg +n001375/0119_04.jpg +n001375/0166_02.jpg +n001375/0174_02.jpg +n001375/0183_01.jpg +n001375/0199_03.jpg +n001375/0213_04.jpg +n001375/0225_01.jpg +n001375/0297_01.jpg +n001375/0364_01.jpg +n001375/0375_01.jpg +n001375/0378_01.jpg +n001375/0400_01.jpg +n001375/0408_02.jpg +n001376/0035_03.jpg +n001376/0099_02.jpg +n001376/0180_02.jpg +n001376/0207_02.jpg +n001376/0254_03.jpg +n001376/0328_03.jpg +n001377/0025_01.jpg +n001377/0114_01.jpg +n001377/0593_01.jpg +n001378/0005_01.jpg +n001378/0006_03.jpg +n001378/0009_01.jpg +n001378/0027_02.jpg +n001378/0053_02.jpg +n001378/0055_01.jpg +n001378/0063_05.jpg +n001378/0063_05.jpg +n001378/0068_01.jpg +n001378/0086_01.jpg +n001378/0091_01.jpg +n001378/0093_02.jpg +n001378/0103_01.jpg +n001378/0104_01.jpg +n001378/0125_02.jpg +n001378/0138_04.jpg +n001378/0141_02.jpg +n001378/0159_01.jpg +n001378/0162_03.jpg +n001378/0197_01.jpg +n001378/0510_03.jpg +n001378/0935_01.jpg +n001378/0939_01.jpg +n001379/0017_01.jpg +n001379/0262_01.jpg +n001380/0221_01.jpg +n001381/0200_02.jpg +n001381/0386_01.jpg +n001382/0008_02.jpg +n001382/0080_01.jpg +n001382/0082_04.jpg +n001382/0105_02.jpg +n001382/0150_04.jpg +n001382/0350_03.jpg +n001383/0272_01.jpg +n001384/0450_01.jpg +n001385/0056_02.jpg +n001385/0108_01.jpg +n001385/0138_05.jpg +n001385/0160_01.jpg +n001385/0243_01.jpg +n001385/0246_01.jpg +n001385/0312_01.jpg +n001385/0316_01.jpg +n001386/0262_09.jpg +n001387/0153_01.jpg +n001387/0211_02.jpg +n001387/0312_01.jpg +n001388/0101_02.jpg +n001388/0179_01.jpg +n001389/0078_01.jpg +n001389/0332_01.jpg +n001389/0385_02.jpg +n001390/0159_02.jpg +n001391/0015_02.jpg +n001391/0073_01.jpg +n001391/0105_01.jpg +n001391/0143_01.jpg +n001391/0153_02.jpg +n001391/0173_02.jpg +n001391/0237_01.jpg +n001391/0338_01.jpg +n001391/0354_03.jpg +n001391/0374_01.jpg +n001391/0376_01.jpg +n001391/0496_01.jpg +n001391/0657_01.jpg +n001392/0213_04.jpg +n001392/0337_01.jpg +n001392/0513_02.jpg +n001392/0503_01.jpg 
+n001393/0003_01.jpg +n001393/0083_04.jpg +n001393/0271_02.jpg +n001393/0335_01.jpg +n001393/0336_01.jpg +n001393/0342_01.jpg +n001393/0357_02.jpg +n001393/0373_02.jpg +n001393/0404_02.jpg +n001393/0415_01.jpg +n001394/0185_01.jpg +n001395/0024_01.jpg +n001395/0036_02.jpg +n001395/0167_02.jpg +n001395/0182_02.jpg +n001395/0288_02.jpg +n001395/0386_01.jpg +n001395/0392_01.jpg +n001396/0272_03.jpg +n001397/0054_01.jpg +n001397/0077_01.jpg +n001397/0172_01.jpg +n001397/0235_01.jpg +n001397/0417_01.jpg +n001397/0531_01.jpg +n001397/0605_01.jpg +n001398/0018_01.jpg +n001398/0125_01.jpg +n001398/0286_02.jpg +n001398/0314_01.jpg +n001399/0025_01.jpg +n001399/0231_01.jpg +n001399/0237_01.jpg +n001399/0246_01.jpg +n001399/0249_01.jpg +n001400/0064_01.jpg +n001400/0196_01.jpg +n001400/0308_01.jpg +n001400/0376_01.jpg +n001402/0166_01.jpg +n001403/0014_02.jpg +n001403/0101_02.jpg +n001403/0195_01.jpg +n001403/0314_01.jpg +n001403/0324_01.jpg +n001403/0335_01.jpg +n001403/0404_01.jpg +n001403/0409_03.jpg +n001404/0014_01.jpg +n001404/0053_02.jpg +n001404/0218_02.jpg +n001404/0319_01.jpg +n001404/0411_01.jpg +n001405/0002_01.jpg +n001405/0369_01.jpg +n001406/0031_01.jpg +n001407/0259_01.jpg +n001407/0517_01.jpg +n001408/0104_01.jpg +n001408/0106_01.jpg +n001408/0189_02.jpg +n001408/0227_04.jpg +n001408/0433_01.jpg +n001409/0012_02.jpg +n001409/0013_01.jpg +n001409/0014_02.jpg +n001409/0043_02.jpg +n001409/0055_02.jpg +n001409/0203_01.jpg +n001409/0234_01.jpg +n001409/0237_01.jpg +n001409/0314_02.jpg +n001409/0330_01.jpg +n001409/0420_02.jpg +n001409/0423_02.jpg +n001410/0230_01.jpg +n001410/0389_01.jpg +n001410/0440_01.jpg +n001411/0024_01.jpg +n001411/0085_01.jpg +n001411/0288_01.jpg +n001412/0020_01.jpg +n001412/0027_01.jpg +n001412/0055_04.jpg +n001412/0252_02.jpg +n001412/0315_01.jpg +n001412/0357_01.jpg +n001413/0057_02.jpg +n001413/0234_01.jpg +n001413/0259_02.jpg +n001413/0267_01.jpg +n001413/0271_01.jpg +n001413/0276_01.jpg +n001413/0327_02.jpg +n001413/0379_01.jpg 
+n001413/0410_01.jpg +n001413/0457_01.jpg +n001414/0093_01.jpg +n001414/0245_01.jpg +n001414/0281_01.jpg +n001415/0013_03.jpg +n001415/0052_01.jpg +n001415/0132_02.jpg +n001415/0156_01.jpg +n001415/0183_01.jpg +n001415/0276_01.jpg +n001415/0286_02.jpg +n001415/0303_01.jpg +n001415/0329_01.jpg +n001415/0353_02.jpg +n001415/0383_01.jpg +n001416/0006_01.jpg +n001416/0135_02.jpg +n001416/0194_01.jpg +n001417/0045_02.jpg +n001417/0064_02.jpg +n001417/0091_02.jpg +n001417/0088_01.jpg +n001417/0148_02.jpg +n001417/0167_01.jpg +n001417/0168_01.jpg +n001417/0279_02.jpg +n001419/0110_01.jpg +n001419/0116_01.jpg +n001419/0392_01.jpg +n001420/0243_01.jpg +n001420/0258_02.jpg +n001420/0302_02.jpg +n001421/0122_02.jpg +n001421/0169_01.jpg +n001421/0171_01.jpg +n001422/0111_01.jpg +n001422/0200_02.jpg +n001422/0310_01.jpg +n001422/0407_01.jpg +n001422/0458_01.jpg +n001423/0050_01.jpg +n001423/0095_01.jpg +n001423/0097_02.jpg +n001423/0144_01.jpg +n001424/0005_02.jpg +n001424/0095_02.jpg +n001424/0103_01.jpg +n001424/0128_02.jpg +n001424/0147_01.jpg +n001424/0260_02.jpg +n001424/0378_02.jpg +n001425/0253_01.jpg +n001425/0313_01.jpg +n001426/0012_01.jpg +n001426/0027_01.jpg +n001426/0037_02.jpg +n001426/0044_03.jpg +n001426/0071_02.jpg +n001426/0258_01.jpg +n001427/0082_02.jpg +n001427/0124_01.jpg +n001427/0153_01.jpg +n001427/0169_01.jpg +n001427/0221_01.jpg +n001427/0262_01.jpg +n001427/0330_01.jpg +n001428/0068_01.jpg +n001428/0076_01.jpg +n001428/0195_01.jpg +n001428/0230_01.jpg +n001428/0298_01.jpg +n001428/0543_01.jpg +n001428/0658_01.jpg +n001429/0173_01.jpg +n001429/0174_01.jpg +n001430/0071_01.jpg +n001430/0090_01.jpg +n001430/0294_01.jpg +n001430/0363_02.jpg +n001430/0411_01.jpg +n001431/0056_01.jpg +n001431/0108_02.jpg +n001431/0264_01.jpg +n001431/0440_03.jpg +n001432/0033_03.jpg +n001432/0146_02.jpg +n001432/0166_01.jpg +n001432/0173_01.jpg +n001432/0200_02.jpg +n001432/0240_01.jpg +n001432/0287_01.jpg +n001432/0334_01.jpg +n001432/0355_03.jpg +n001432/0355_01.jpg 
+n001432/0363_01.jpg +n001433/0079_01.jpg +n001433/0085_01.jpg +n001433/0141_01.jpg +n001433/0176_01.jpg +n001433/0310_01.jpg +n001434/0083_01.jpg +n001434/0206_01.jpg +n001434/0260_01.jpg +n001434/0300_01.jpg +n001434/0308_01.jpg +n001434/0311_01.jpg +n001434/0364_01.jpg +n001434/0419_01.jpg +n001436/0171_01.jpg +n001436/0238_01.jpg +n001436/0299_01.jpg +n001437/0049_01.jpg +n001437/0170_01.jpg +n001437/0205_01.jpg +n001437/0212_02.jpg +n001437/0227_01.jpg +n001437/0231_01.jpg +n001437/0273_01.jpg +n001437/0412_01.jpg +n001437/0459_01.jpg +n001440/0015_01.jpg +n001440/0048_02.jpg +n001440/0049_01.jpg +n001440/0072_01.jpg +n001440/0134_01.jpg +n001440/0150_02.jpg +n001440/0152_02.jpg +n001440/0193_02.jpg +n001440/0202_01.jpg +n001441/0008_02.jpg +n001441/0062_02.jpg +n001441/0065_01.jpg +n001441/0067_02.jpg +n001441/0073_02.jpg +n001441/0076_01.jpg +n001441/0085_01.jpg +n001441/0107_01.jpg +n001441/0106_02.jpg +n001441/0224_01.jpg +n001441/0471_02.jpg +n001441/0475_01.jpg +n001441/0482_02.jpg +n001442/0165_02.jpg +n001442/0264_01.jpg +n001442/0293_01.jpg +n001442/0311_01.jpg +n001442/0333_01.jpg +n001442/0432_01.jpg +n001442/0433_01.jpg +n001442/0452_01.jpg +n001442/0516_01.jpg +n001443/0224_01.jpg +n001443/0278_01.jpg +n001443/0297_01.jpg +n001443/0370_02.jpg +n001444/0005_01.jpg +n001444/0022_02.jpg +n001444/0024_02.jpg +n001444/0037_02.jpg +n001444/0044_03.jpg +n001444/0063_01.jpg +n001444/0064_01.jpg +n001444/0074_01.jpg +n001444/0080_01.jpg +n001444/0091_03.jpg +n001444/0096_03.jpg +n001444/0103_05.jpg +n001444/0225_04.jpg +n001444/0337_01.jpg +n001444/0459_02.jpg +n001445/0027_02.jpg +n001445/0043_01.jpg +n001445/0093_02.jpg +n001445/0256_01.jpg +n001445/0280_01.jpg +n001445/0363_01.jpg +n001445/0368_01.jpg +n001445/0388_01.jpg +n001445/0390_01.jpg +n001445/0394_01.jpg +n001445/0502_01.jpg +n001445/0528_02.jpg +n001447/0014_01.jpg +n001447/0043_01.jpg +n001447/0057_03.jpg +n001447/0071_03.jpg +n001448/0085_01.jpg +n001448/0084_01.jpg +n001449/0217_01.jpg 
+n001449/0288_01.jpg +n001450/0032_02.jpg +n001450/0199_01.jpg +n001450/0204_02.jpg +n001450/0205_01.jpg +n001450/0263_02.jpg +n001451/0007_01.jpg +n001451/0111_01.jpg +n001451/0154_02.jpg +n001451/0201_02.jpg +n001451/0205_02.jpg +n001451/0292_01.jpg +n001451/0300_02.jpg +n001451/0301_01.jpg +n001451/0301_02.jpg +n001451/0308_02.jpg +n001452/0070_01.jpg +n001452/0099_01.jpg +n001452/0214_11.jpg +n001452/0236_01.jpg +n001453/0011_02.jpg +n001453/0023_01.jpg +n001453/0139_01.jpg +n001453/0150_01.jpg +n001454/0079_01.jpg +n001454/0078_01.jpg +n001454/0097_01.jpg +n001455/0070_02.jpg +n001455/0298_03.jpg +n001456/0472_01.jpg +n001457/0002_01.jpg +n001457/0067_01.jpg +n001457/0076_01.jpg +n001457/0115_01.jpg +n001457/0116_02.jpg +n001457/0177_01.jpg +n001457/0183_01.jpg +n001457/0186_01.jpg +n001457/0278_01.jpg +n001457/0324_01.jpg +n001458/0111_02.jpg +n001458/0136_01.jpg +n001458/0164_01.jpg +n001458/0215_02.jpg +n001458/0219_02.jpg +n001458/0223_02.jpg +n001458/0431_01.jpg +n001458/0433_02.jpg +n001459/0317_01.jpg +n001459/0318_01.jpg +n001460/0173_01.jpg +n001460/0210_01.jpg +n001460/0219_01.jpg +n001460/0300_02.jpg +n001460/0355_01.jpg +n001460/0462_01.jpg +n001460/0466_02.jpg +n001460/0467_02.jpg +n001461/0027_01.jpg +n001461/0029_02.jpg +n001461/0032_01.jpg +n001461/0152_01.jpg +n001461/0162_01.jpg +n001461/0471_02.jpg +n001461/0473_01.jpg +n001462/0041_02.jpg +n001462/0063_02.jpg +n001462/0088_01.jpg +n001462/0129_01.jpg +n001462/0131_01.jpg +n001462/0146_01.jpg +n001462/0164_01.jpg +n001462/0203_02.jpg +n001463/0019_02.jpg +n001463/0122_01.jpg +n001463/0131_01.jpg +n001463/0164_02.jpg +n001463/0256_02.jpg +n001463/0258_02.jpg +n001463/0309_01.jpg +n001463/0407_01.jpg +n001464/0166_01.jpg +n001464/0192_01.jpg +n001464/0199_01.jpg +n001464/0268_01.jpg +n001465/0152_02.jpg +n001465/0253_02.jpg +n001465/0329_01.jpg +n001465/0378_02.jpg +n001466/0133_01.jpg +n001466/0218_01.jpg +n001466/0300_01.jpg +n001466/0305_01.jpg +n001466/0404_01.jpg +n001466/0414_01.jpg 
+n001466/0563_01.jpg +n001466/0607_01.jpg +n001466/0621_01.jpg +n001466/0695_01.jpg +n001468/0005_01.jpg +n001468/0102_02.jpg +n001468/0112_02.jpg +n001468/0128_02.jpg +n001468/0182_04.jpg +n001468/0430_01.jpg +n001469/0021_01.jpg +n001469/0053_01.jpg +n001469/0123_01.jpg +n001469/0264_02.jpg +n001469/0264_01.jpg +n001469/0288_01.jpg +n001469/0288_02.jpg +n001469/0360_01.jpg +n001469/0484_01.jpg +n001470/0027_04.jpg +n001470/0053_02.jpg +n001470/0055_03.jpg +n001470/0056_01.jpg +n001470/0218_01.jpg +n001470/0216_02.jpg +n001470/0236_01.jpg +n001470/0264_01.jpg +n001470/0320_01.jpg +n001470/0430_01.jpg +n001470/0505_01.jpg +n001471/0063_04.jpg +n001471/0103_01.jpg +n001471/0119_01.jpg +n001471/0121_02.jpg +n001471/0159_01.jpg +n001471/0179_02.jpg +n001471/0187_02.jpg +n001471/0261_01.jpg +n001471/0453_03.jpg +n001471/0640_01.jpg +n001472/0059_02.jpg +n001472/0063_02.jpg +n001472/0105_02.jpg +n001472/0106_01.jpg +n001472/0112_01.jpg +n001472/0118_02.jpg +n001472/0144_01.jpg +n001472/0156_02.jpg +n001472/0171_05.jpg +n001472/0188_04.jpg +n001472/0190_01.jpg +n001472/0191_02.jpg +n001472/0196_01.jpg +n001472/0206_02.jpg +n001472/0209_03.jpg +n001472/0219_03.jpg +n001472/0320_01.jpg +n001472/0324_01.jpg +n001472/0329_05.jpg +n001472/0336_03.jpg +n001472/0346_02.jpg +n001473/0081_02.jpg +n001473/0087_01.jpg +n001473/0110_01.jpg +n001473/0155_02.jpg +n001474/0013_01.jpg +n001474/0020_02.jpg +n001474/0057_01.jpg +n001474/0059_05.jpg +n001474/0126_03.jpg +n001474/0131_01.jpg +n001474/0177_02.jpg +n001474/0356_02.jpg +n001474/0374_01.jpg +n001474/0386_03.jpg +n001474/0424_01.jpg +n001475/0103_02.jpg +n001476/0253_01.jpg +n001476/0377_02.jpg +n001477/0068_01.jpg +n001477/0082_01.jpg +n001477/0158_01.jpg +n001478/0058_01.jpg +n001479/0077_01.jpg +n001479/0091_02.jpg +n001479/0136_01.jpg +n001479/0183_03.jpg +n001479/0427_02.jpg +n001479/0499_01.jpg +n001480/0099_02.jpg +n001480/0169_01.jpg +n001480/0374_01.jpg +n001480/0416_01.jpg +n001480/0447_01.jpg +n001482/0076_01.jpg 
+n001483/0014_01.jpg +n001483/0050_02.jpg +n001483/0104_02.jpg +n001483/0157_01.jpg +n001483/0168_01.jpg +n001483/0221_01.jpg +n001483/0223_01.jpg +n001483/0224_01.jpg +n001483/0261_01.jpg +n001484/0005_01.jpg +n001484/0020_01.jpg +n001484/0063_01.jpg +n001484/0118_02.jpg +n001484/0133_02.jpg +n001484/0157_01.jpg +n001484/0178_01.jpg +n001484/0215_01.jpg +n001484/0217_01.jpg +n001484/0247_01.jpg +n001484/0245_01.jpg +n001484/0293_01.jpg +n001484/0312_01.jpg +n001484/0340_08.jpg +n001484/0404_01.jpg +n001484/0496_01.jpg +n001486/0092_03.jpg +n001486/0368_02.jpg +n001487/0030_01.jpg +n001487/0066_02.jpg +n001487/0103_01.jpg +n001488/0203_01.jpg +n001488/0284_01.jpg +n001488/0306_01.jpg +n001488/0327_01.jpg +n001489/0036_01.jpg +n001489/0051_02.jpg +n001489/0056_01.jpg +n001489/0072_03.jpg +n001489/0172_01.jpg +n001489/0341_01.jpg +n001489/0373_01.jpg +n001490/0012_01.jpg +n001490/0140_01.jpg +n001490/0186_01.jpg +n001490/0190_01.jpg +n001490/0223_01.jpg +n001490/0601_02.jpg +n001491/0017_01.jpg +n001491/0037_02.jpg +n001491/0263_02.jpg +n001491/0366_01.jpg +n001491/0367_01.jpg +n001491/0385_01.jpg +n001491/0394_01.jpg +n001492/0074_02.jpg +n001492/0266_01.jpg +n001492/0306_02.jpg +n001492/0541_03.jpg +n001493/0055_02.jpg +n001493/0306_01.jpg +n001493/0410_01.jpg +n001493/0500_01.jpg +n001494/0077_03.jpg +n001494/0139_01.jpg +n001494/0169_01.jpg +n001494/0208_01.jpg +n001494/0210_01.jpg +n001494/0297_01.jpg +n001494/0372_02.jpg +n001494/0392_01.jpg +n001494/0409_03.jpg +n001495/0025_01.jpg +n001495/0033_02.jpg +n001495/0040_01.jpg +n001495/0104_02.jpg +n001495/0115_01.jpg +n001495/0130_01.jpg +n001495/0188_01.jpg +n001495/0197_02.jpg +n001495/0225_02.jpg +n001495/0290_02.jpg +n001495/0298_02.jpg +n001495/0335_01.jpg +n001495/0337_01.jpg +n001495/0375_02.jpg +n001495/0398_01.jpg +n001495/0423_01.jpg +n001495/0431_02.jpg +n001495/0434_02.jpg +n001495/0465_01.jpg +n001495/0461_01.jpg +n001495/0473_01.jpg +n001495/0491_03.jpg +n001495/0496_01.jpg +n001495/0508_01.jpg 
+n001495/0511_01.jpg +n001495/0538_02.jpg +n001495/0543_01.jpg +n001495/0546_01.jpg +n001495/0593_02.jpg +n001495/0617_01.jpg +n001495/0639_01.jpg +n001496/0187_01.jpg +n001496/0336_02.jpg +n001496/0337_02.jpg +n001496/0410_01.jpg +n001496/0416_01.jpg +n001496/0452_03.jpg +n001496/0589_01.jpg +n001496/0646_01.jpg +n001497/0021_01.jpg +n001497/0067_02.jpg +n001497/0070_01.jpg +n001497/0134_01.jpg +n001497/0139_01.jpg +n001497/0175_01.jpg +n001497/0209_01.jpg +n001497/0214_02.jpg +n001497/0224_01.jpg +n001497/0231_02.jpg +n001497/0241_01.jpg +n001497/0352_01.jpg +n001497/0421_01.jpg +n001497/0479_01.jpg +n001497/0472_01.jpg +n001497/0502_01.jpg +n001498/0033_01.jpg +n001498/0035_01.jpg +n001499/0022_02.jpg +n001499/0047_02.jpg +n001499/0056_02.jpg +n001499/0058_02.jpg +n001499/0063_01.jpg +n001499/0068_01.jpg +n001499/0075_02.jpg +n001499/0080_01.jpg +n001499/0092_01.jpg +n001499/0093_02.jpg +n001499/0099_01.jpg +n001499/0100_03.jpg +n001499/0113_01.jpg +n001499/0119_01.jpg +n001499/0120_01.jpg +n001499/0122_01.jpg +n001499/0137_01.jpg +n001499/0163_02.jpg +n001499/0177_01.jpg +n001499/0232_01.jpg +n001499/0245_02.jpg +n001499/0327_01.jpg +n001499/0337_02.jpg +n001499/0359_01.jpg +n001499/0379_01.jpg +n001500/0072_01.jpg +n001500/0222_02.jpg +n001500/0239_01.jpg +n001500/0313_01.jpg +n001501/0039_01.jpg +n001501/0065_01.jpg +n001501/0082_02.jpg +n001501/0186_01.jpg +n001501/0232_01.jpg +n001501/0256_01.jpg +n001501/0258_01.jpg +n001501/0312_01.jpg +n001501/0345_01.jpg +n001501/0390_01.jpg +n001502/0054_02.jpg +n001502/0092_02.jpg +n001502/0271_01.jpg +n001502/0385_01.jpg +n001502/0540_01.jpg +n001503/0066_02.jpg +n001503/0101_01.jpg +n001503/0115_01.jpg +n001503/0116_01.jpg +n001503/0415_01.jpg +n001503/0421_02.jpg +n001504/0141_02.jpg +n001504/0234_01.jpg +n001504/0304_03.jpg +n001504/0440_01.jpg +n001505/0088_01.jpg +n001505/0406_02.jpg +n001505/0415_01.jpg +n001505/0418_01.jpg +n001505/0444_01.jpg +n001506/0147_01.jpg +n001506/0198_01.jpg +n001506/0225_01.jpg 
+n001506/0312_01.jpg +n001506/0388_01.jpg +n001506/0390_01.jpg +n001507/0105_01.jpg +n001508/0061_02.jpg +n001508/0078_02.jpg +n001508/0108_02.jpg +n001508/0141_01.jpg +n001508/0217_01.jpg +n001508/0383_03.jpg +n001508/0581_02.jpg +n001508/0723_01.jpg +n001509/0004_01.jpg +n001509/0018_01.jpg +n001509/0023_01.jpg +n001509/0043_01.jpg +n001509/0076_01.jpg +n001509/0104_02.jpg +n001509/0141_01.jpg +n001509/0205_02.jpg +n001509/0215_02.jpg +n001509/0266_01.jpg +n001509/0300_02.jpg +n001509/0314_01.jpg +n001509/0475_01.jpg +n001509/0525_02.jpg +n001511/0029_01.jpg +n001511/0034_02.jpg +n001511/0105_01.jpg +n001511/0131_01.jpg +n001511/0133_01.jpg +n001511/0155_02.jpg +n001511/0196_02.jpg +n001511/0238_01.jpg +n001511/0312_01.jpg +n001511/0326_01.jpg +n001511/0331_02.jpg +n001511/0374_01.jpg +n001512/0003_05.jpg +n001512/0031_01.jpg +n001512/0044_01.jpg +n001512/0044_02.jpg +n001512/0087_01.jpg +n001512/0096_02.jpg +n001512/0136_01.jpg +n001512/0608_01.jpg +n001513/0006_02.jpg +n001513/0038_01.jpg +n001513/0041_01.jpg +n001513/0089_02.jpg +n001513/0122_01.jpg +n001513/0245_01.jpg +n001513/0264_01.jpg +n001513/0387_01.jpg +n001514/0181_01.jpg +n001514/0239_01.jpg +n001514/0256_02.jpg +n001514/0490_01.jpg +n001514/0502_02.jpg +n001515/0047_03.jpg +n001515/0051_02.jpg +n001515/0204_01.jpg +n001515/0231_02.jpg +n001515/0301_01.jpg +n001515/0415_01.jpg +n001516/0040_01.jpg +n001516/0072_02.jpg +n001516/0283_01.jpg +n001518/0155_01.jpg +n001518/0280_02.jpg +n001518/0283_02.jpg +n001518/0393_02.jpg +n001518/0422_01.jpg +n001518/0516_05.jpg +n001519/0258_02.jpg +n001519/0366_01.jpg +n001520/0084_01.jpg +n001520/0094_02.jpg +n001520/0160_03.jpg +n001520/0168_01.jpg +n001520/0190_01.jpg +n001520/0280_01.jpg +n001520/0348_01.jpg +n001520/0391_02.jpg +n001520/0393_01.jpg +n001520/0420_01.jpg +n001521/0044_01.jpg +n001521/0045_03.jpg +n001521/0052_01.jpg +n001521/0107_01.jpg +n001521/0125_01.jpg +n001521/0152_01.jpg +n001521/0165_01.jpg +n001521/0171_03.jpg +n001521/0173_01.jpg 
+n001521/0175_01.jpg +n001521/0186_02.jpg +n001521/0187_02.jpg +n001521/0204_01.jpg +n001521/0217_01.jpg +n001521/0235_01.jpg +n001521/0302_01.jpg +n001522/0168_01.jpg +n001522/0245_01.jpg +n001522/0311_02.jpg +n001522/0355_01.jpg +n001523/0071_02.jpg +n001523/0115_01.jpg +n001523/0200_01.jpg +n001523/0296_03.jpg +n001523/0338_02.jpg +n001523/0400_01.jpg +n001523/0475_01.jpg +n001525/0003_01.jpg +n001525/0049_01.jpg +n001525/0088_01.jpg +n001525/0126_01.jpg +n001525/0263_01.jpg +n001525/0334_01.jpg +n001526/0010_02.jpg +n001526/0060_02.jpg +n001526/0061_01.jpg +n001526/0095_01.jpg +n001526/0095_02.jpg +n001526/0136_05.jpg +n001526/0132_02.jpg +n001526/0132_01.jpg +n001526/0507_01.jpg +n001528/0125_01.jpg +n001528/0134_01.jpg +n001528/0359_02.jpg +n001529/0170_01.jpg +n001529/0237_01.jpg +n001529/0250_01.jpg +n001529/0309_01.jpg +n001530/0076_01.jpg +n001530/0172_02.jpg +n001530/0198_01.jpg +n001530/0235_01.jpg +n001530/0304_01.jpg +n001530/0312_02.jpg +n001530/0328_01.jpg +n001530/0411_01.jpg +n001530/0420_01.jpg +n001530/0461_02.jpg +n001531/0002_01.jpg +n001531/0038_01.jpg +n001531/0136_03.jpg +n001531/0141_01.jpg +n001531/0157_01.jpg +n001531/0157_02.jpg +n001532/0220_01.jpg +n001533/0278_01.jpg +n001534/0038_01.jpg +n001534/0121_01.jpg +n001534/0244_02.jpg +n001534/0342_02.jpg +n001534/0359_01.jpg +n001534/0382_02.jpg +n001534/0397_02.jpg +n001535/0014_01.jpg +n001535/0049_02.jpg +n001535/0140_01.jpg +n001535/0140_02.jpg +n001535/0158_02.jpg +n001535/0179_01.jpg +n001535/0181_01.jpg +n001535/0238_02.jpg +n001535/0238_03.jpg +n001535/0239_01.jpg +n001535/0505_01.jpg +n001535/0505_02.jpg +n001535/0520_01.jpg +n001536/0026_02.jpg +n001536/0029_02.jpg +n001536/0056_01.jpg +n001536/0123_01.jpg +n001536/0161_03.jpg +n001536/0238_02.jpg +n001536/0248_02.jpg +n001536/0267_01.jpg +n001536/0276_03.jpg +n001536/0311_01.jpg +n001536/0334_01.jpg +n001536/0339_01.jpg +n001536/0340_03.jpg +n001537/0081_13.jpg +n001537/0157_01.jpg +n001537/0310_01.jpg +n001537/0394_01.jpg 
+n001537/0412_01.jpg +n001538/0200_01.jpg +n001538/0271_01.jpg +n001538/0376_01.jpg +n001538/0490_01.jpg +n001539/0057_01.jpg +n001539/0152_01.jpg +n001539/0225_02.jpg +n001539/0270_01.jpg +n001540/0004_03.jpg +n001540/0092_01.jpg +n001540/0141_01.jpg +n001540/0162_01.jpg +n001540/0189_02.jpg +n001540/0305_01.jpg +n001540/0311_01.jpg +n001540/0327_01.jpg +n001540/0376_01.jpg +n001540/0513_03.jpg +n001540/0532_01.jpg +n001541/0052_01.jpg +n001541/0230_01.jpg +n001541/0398_01.jpg +n001541/0409_01.jpg +n001541/0470_03.jpg +n001541/0486_01.jpg +n001542/0253_03.jpg +n001543/0038_02.jpg +n001543/0370_01.jpg +n001543/0375_01.jpg +n001545/0157_01.jpg +n001545/0194_01.jpg +n001545/0292_01.jpg +n001545/0310_02.jpg +n001546/0049_06.jpg +n001546/0499_01.jpg +n001547/0046_01.jpg +n001547/0048_01.jpg +n001547/0089_01.jpg +n001547/0144_01.jpg +n001547/0158_01.jpg +n001547/0179_01.jpg +n001547/0180_02.jpg +n001547/0191_01.jpg +n001547/0659_01.jpg +n001548/0005_01.jpg +n001548/0112_02.jpg +n001548/0141_03.jpg +n001548/0142_02.jpg +n001548/0174_01.jpg +n001548/0173_05.jpg +n001548/0449_01.jpg +n001548/0520_01.jpg +n001549/0393_02.jpg +n001549/0412_03.jpg +n001549/0416_02.jpg +n001550/0006_02.jpg +n001550/0004_01.jpg +n001550/0003_01.jpg +n001550/0064_02.jpg +n001550/0102_01.jpg +n001550/0471_01.jpg +n001551/0159_02.jpg +n001551/0254_01.jpg +n001551/0265_01.jpg +n001551/0290_01.jpg +n001551/0315_01.jpg +n001551/0346_01.jpg +n001551/0466_01.jpg +n001552/0033_02.jpg +n001552/0069_02.jpg +n001552/0076_02.jpg +n001552/0091_02.jpg +n001552/0103_02.jpg +n001552/0136_02.jpg +n001552/0141_02.jpg +n001552/0221_03.jpg +n001552/0251_02.jpg +n001552/0294_02.jpg +n001552/0296_02.jpg +n001552/0301_01.jpg +n001552/0313_02.jpg +n001552/0385_03.jpg +n001552/0398_02.jpg +n001552/0400_01.jpg +n001552/0461_01.jpg +n001552/0482_02.jpg +n001552/0491_02.jpg +n001553/0099_02.jpg +n001553/0112_01.jpg +n001553/0130_01.jpg +n001553/0204_01.jpg +n001553/0265_01.jpg +n001553/0316_01.jpg +n001553/0332_01.jpg 
+n001553/0336_01.jpg +n001553/0461_02.jpg +n001554/0130_01.jpg +n001554/0131_01.jpg +n001554/0137_01.jpg +n001554/0160_01.jpg +n001555/0071_01.jpg +n001555/0101_03.jpg +n001555/0137_01.jpg +n001555/0264_01.jpg +n001555/0267_02.jpg +n001555/0375_01.jpg +n001557/0028_01.jpg +n001557/0189_02.jpg +n001559/0244_02.jpg +n001559/0269_01.jpg +n001559/0512_01.jpg +n001560/0003_01.jpg +n001560/0119_01.jpg +n001560/0127_03.jpg +n001560/0149_01.jpg +n001560/0290_06.jpg +n001560/0302_01.jpg +n001560/0358_02.jpg +n001560/0384_02.jpg +n001560/0397_01.jpg +n001560/0499_01.jpg +n001561/0025_02.jpg +n001561/0076_01.jpg +n001561/0173_02.jpg +n001561/0175_01.jpg +n001561/0212_02.jpg +n001561/0330_01.jpg +n001561/0353_01.jpg +n001561/0378_01.jpg +n001561/0464_02.jpg +n001561/0502_01.jpg +n001562/0098_01.jpg +n001562/0119_01.jpg +n001562/0192_02.jpg +n001562/0198_02.jpg +n001562/0282_01.jpg +n001562/0302_01.jpg +n001562/0318_02.jpg +n001563/0040_01.jpg +n001563/0055_01.jpg +n001563/0076_01.jpg +n001563/0081_01.jpg +n001563/0171_01.jpg +n001563/0204_01.jpg +n001563/0213_01.jpg +n001563/0259_01.jpg +n001563/0261_02.jpg +n001563/0350_01.jpg +n001563/0441_03.jpg +n001565/0072_01.jpg +n001565/0146_02.jpg +n001565/0257_01.jpg +n001565/0404_01.jpg +n001566/0005_02.jpg +n001566/0022_03.jpg +n001566/0044_01.jpg +n001566/0045_03.jpg +n001566/0046_01.jpg +n001566/0057_01.jpg +n001566/0110_01.jpg +n001566/0131_01.jpg +n001566/0172_02.jpg +n001566/0191_01.jpg +n001566/0233_01.jpg +n001566/0276_03.jpg +n001566/0297_02.jpg +n001566/0387_01.jpg +n001566/0431_01.jpg +n001566/0432_02.jpg +n001566/0438_01.jpg +n001566/0476_02.jpg +n001566/0535_01.jpg +n001566/0624_01.jpg +n001566/0676_02.jpg +n001567/0005_01.jpg +n001567/0007_01.jpg +n001567/0119_01.jpg +n001567/0126_01.jpg +n001567/0136_02.jpg +n001567/0164_02.jpg +n001567/0310_02.jpg +n001567/0331_02.jpg +n001567/0412_01.jpg +n001567/0469_01.jpg +n001568/0365_02.jpg +n001569/0082_01.jpg +n001569/0085_01.jpg +n001569/0165_01.jpg +n001571/0179_02.jpg 
+n001573/0002_01.jpg +n001573/0172_02.jpg +n001573/0189_02.jpg +n001573/0304_02.jpg +n001574/0043_01.jpg +n001574/0052_02.jpg +n001574/0106_02.jpg +n001574/0155_01.jpg +n001574/0219_01.jpg +n001574/0266_01.jpg +n001574/0267_02.jpg +n001574/0273_01.jpg +n001575/0019_02.jpg +n001575/0392_01.jpg +n001577/0123_01.jpg +n001577/0175_01.jpg +n001577/0290_01.jpg +n001578/0065_01.jpg +n001578/0211_01.jpg +n001578/0236_01.jpg +n001578/0312_01.jpg +n001578/0370_02.jpg +n001578/0403_01.jpg +n001579/0069_01.jpg +n001579/0100_01.jpg +n001579/0202_01.jpg +n001579/0481_01.jpg +n001579/0691_01.jpg +n001579/0699_01.jpg +n001579/0707_02.jpg +n001579/0718_02.jpg +n001580/0038_01.jpg +n001580/0057_01.jpg +n001580/0075_01.jpg +n001580/0080_01.jpg +n001580/0250_01.jpg +n001582/0002_02.jpg +n001582/0145_02.jpg +n001583/0008_02.jpg +n001583/0008_03.jpg +n001583/0043_01.jpg +n001583/0041_01.jpg +n001583/0040_02.jpg +n001583/0111_02.jpg +n001583/0111_03.jpg +n001583/0136_02.jpg +n001583/0136_03.jpg +n001583/0142_01.jpg +n001583/0220_01.jpg +n001584/0001_02.jpg +n001584/0145_02.jpg +n001584/0174_01.jpg +n001584/0287_02.jpg +n001584/0416_02.jpg +n001584/0419_02.jpg +n001585/0053_01.jpg +n001585/0145_01.jpg +n001585/0216_01.jpg +n001585/0220_01.jpg +n001585/0394_01.jpg +n001585/0651_01.jpg +n001586/0064_01.jpg +n001586/0109_01.jpg +n001586/0351_01.jpg +n001586/0547_02.jpg +n001586/0598_01.jpg +n001586/0623_01.jpg +n001586/0723_02.jpg +n001586/0927_01.jpg +n001587/0026_01.jpg +n001587/0132_01.jpg +n001587/0149_01.jpg +n001587/0157_02.jpg +n001587/0190_02.jpg +n001587/0206_01.jpg +n001587/0222_01.jpg +n001587/0305_02.jpg +n001587/0323_02.jpg +n001587/0458_01.jpg +n001587/0458_02.jpg +n001587/0465_01.jpg +n001587/0570_02.jpg +n001587/0620_02.jpg +n001587/0684_03.jpg +n001587/0693_02.jpg +n001588/0079_02.jpg +n001588/0080_01.jpg +n001588/0455_02.jpg +n001588/0526_01.jpg +n001588/0621_03.jpg +n001589/0125_01.jpg +n001589/0131_01.jpg +n001589/0133_02.jpg +n001589/0152_01.jpg +n001589/0191_01.jpg 
+n001589/0233_01.jpg +n001589/0264_02.jpg +n001589/0349_01.jpg +n001589/0366_02.jpg +n001589/0392_01.jpg +n001589/0394_02.jpg +n001589/0528_02.jpg +n001589/0538_01.jpg +n001590/0013_01.jpg +n001590/0039_01.jpg +n001590/0113_02.jpg +n001590/0125_01.jpg +n001590/0191_01.jpg +n001591/0052_02.jpg +n001591/0138_03.jpg +n001591/0530_01.jpg +n001592/0021_01.jpg +n001592/0050_05.jpg +n001592/0065_02.jpg +n001592/0110_02.jpg +n001592/0131_01.jpg +n001592/0142_01.jpg +n001592/0163_02.jpg +n001592/0173_01.jpg +n001592/0229_01.jpg +n001592/0268_01.jpg +n001592/0306_01.jpg +n001592/0329_01.jpg +n001592/0380_02.jpg +n001592/0436_01.jpg +n001592/0455_01.jpg +n001592/0517_01.jpg +n001592/0523_02.jpg +n001592/0615_01.jpg +n001593/0123_01.jpg +n001593/0143_01.jpg +n001593/0867_01.jpg +n001593/1168_01.jpg +n001594/0035_01.jpg +n001594/0045_01.jpg +n001594/0045_02.jpg +n001594/0052_01.jpg +n001594/0083_01.jpg +n001594/0100_01.jpg +n001594/0115_01.jpg +n001594/0119_02.jpg +n001594/0121_01.jpg +n001594/0174_01.jpg +n001594/0230_04.jpg +n001594/0249_01.jpg +n001594/0329_02.jpg +n001594/0332_01.jpg +n001595/0001_02.jpg +n001595/0013_01.jpg +n001595/0042_01.jpg +n001595/0043_01.jpg +n001595/0054_02.jpg +n001595/0105_01.jpg +n001595/0113_02.jpg +n001595/0131_02.jpg +n001595/0165_02.jpg +n001595/0172_01.jpg +n001595/0204_01.jpg +n001595/0216_01.jpg +n001595/0222_03.jpg +n001595/0250_01.jpg +n001595/0266_01.jpg +n001595/0320_02.jpg +n001595/0386_02.jpg +n001595/0416_01.jpg +n001596/0066_02.jpg +n001596/0230_02.jpg +n001596/0601_01.jpg +n001597/0093_01.jpg +n001597/0151_02.jpg +n001598/0021_01.jpg +n001598/0100_01.jpg +n001598/0196_02.jpg +n001598/0393_01.jpg +n001598/0399_02.jpg +n001598/0402_01.jpg +n001599/0115_01.jpg +n001599/0212_01.jpg +n001599/0262_01.jpg +n001599/0449_01.jpg +n001600/0005_01.jpg +n001600/0063_03.jpg +n001600/0134_01.jpg +n001600/0206_01.jpg +n001600/0208_02.jpg +n001600/0215_01.jpg +n001600/0231_01.jpg +n001600/0241_01.jpg +n001600/0383_03.jpg +n001600/0394_03.jpg 
+n001600/0407_01.jpg +n001600/0463_01.jpg +n001601/0003_01.jpg +n001601/0014_01.jpg +n001601/0053_01.jpg +n001601/0080_01.jpg +n001601/0095_01.jpg +n001601/0109_01.jpg +n001601/0226_01.jpg +n001601/0351_01.jpg +n001601/0357_02.jpg +n001601/0373_01.jpg +n001601/0374_01.jpg +n001601/0399_01.jpg +n001601/0403_01.jpg +n001601/0411_01.jpg +n001601/0415_03.jpg +n001601/0431_01.jpg +n001601/0435_01.jpg +n001601/0453_02.jpg +n001601/0513_01.jpg +n001602/0107_01.jpg +n001602/0122_01.jpg +n001602/0146_02.jpg +n001602/0236_03.jpg +n001602/0335_01.jpg +n001602/0343_02.jpg +n001603/0005_01.jpg +n001603/0086_01.jpg +n001603/0171_01.jpg +n001603/0192_01.jpg +n001603/0229_01.jpg +n001603/0268_01.jpg +n001603/0335_02.jpg +n001604/0006_01.jpg +n001604/0015_01.jpg +n001604/0059_01.jpg +n001604/0064_01.jpg +n001604/0116_01.jpg +n001604/0136_01.jpg +n001604/0164_01.jpg +n001604/0217_01.jpg +n001604/0255_01.jpg +n001604/0318_01.jpg +n001604/0474_01.jpg +n001605/0068_01.jpg +n001605/0156_01.jpg +n001606/0013_02.jpg +n001606/0090_01.jpg +n001606/0141_01.jpg +n001606/0159_01.jpg +n001606/0221_01.jpg +n001606/0226_01.jpg +n001606/0308_01.jpg +n001607/0233_01.jpg +n001607/0268_01.jpg +n001607/0286_01.jpg +n001608/0107_02.jpg +n001608/0114_01.jpg +n001608/0437_01.jpg +n001608/0470_02.jpg +n001609/0305_01.jpg +n001609/0368_01.jpg +n001610/0184_01.jpg +n001610/0185_01.jpg +n001610/0188_01.jpg +n001610/0191_02.jpg +n001610/0245_01.jpg +n001611/0068_04.jpg +n001611/0401_02.jpg +n001613/0031_01.jpg +n001613/0041_03.jpg +n001613/0060_02.jpg +n001613/0150_01.jpg +n001613/0154_01.jpg +n001613/0192_01.jpg +n001613/0203_01.jpg +n001613/0339_02.jpg +n001613/0386_02.jpg +n001614/0316_01.jpg +n001614/0354_02.jpg +n001614/0386_02.jpg +n001614/0487_02.jpg +n001616/0016_02.jpg +n001616/0033_01.jpg +n001616/0175_02.jpg +n001616/0205_03.jpg +n001616/0241_01.jpg +n001617/0046_02.jpg +n001617/0120_01.jpg +n001617/0124_01.jpg +n001617/0168_01.jpg +n001617/0215_01.jpg +n001617/0228_01.jpg +n001617/0236_03.jpg 
+n001617/0292_01.jpg +n001617/0324_03.jpg +n001617/0390_01.jpg +n001617/0402_01.jpg +n001617/0541_01.jpg +n001617/0566_01.jpg +n001618/0406_01.jpg +n001618/0438_01.jpg +n001618/0505_01.jpg +n001619/0013_01.jpg +n001619/0097_01.jpg +n001619/0123_01.jpg +n001619/0214_02.jpg +n001619/0213_01.jpg +n001619/0257_02.jpg +n001619/0291_02.jpg +n001620/0165_03.jpg +n001620/0195_01.jpg +n001620/0228_01.jpg +n001620/0291_01.jpg +n001620/0294_02.jpg +n001620/0346_01.jpg +n001620/0377_02.jpg +n001620/0395_02.jpg +n001620/0435_01.jpg +n001620/0446_02.jpg +n001621/0127_01.jpg +n001622/0003_01.jpg +n001622/0272_01.jpg +n001622/0340_01.jpg +n001623/0001_01.jpg +n001623/0023_02.jpg +n001623/0058_04.jpg +n001623/0067_01.jpg +n001623/0113_01.jpg +n001623/0160_01.jpg +n001623/0193_01.jpg +n001623/0207_01.jpg +n001623/0245_01.jpg +n001623/0251_01.jpg +n001623/0289_01.jpg +n001623/0345_01.jpg +n001624/0105_01.jpg +n001624/0106_02.jpg +n001624/0121_02.jpg +n001624/0130_01.jpg +n001624/0129_02.jpg +n001624/0148_01.jpg +n001624/0150_01.jpg +n001624/0151_01.jpg +n001624/0158_01.jpg +n001624/0200_02.jpg +n001624/0201_01.jpg +n001624/0205_01.jpg +n001624/0211_02.jpg +n001624/0230_01.jpg +n001624/0243_02.jpg +n001624/0261_01.jpg +n001624/0329_01.jpg +n001624/0344_01.jpg +n001625/0006_01.jpg +n001625/0023_03.jpg +n001625/0029_02.jpg +n001625/0031_01.jpg +n001625/0050_02.jpg +n001625/0078_01.jpg +n001625/0095_02.jpg +n001625/0120_02.jpg +n001625/0134_02.jpg +n001625/0158_01.jpg +n001625/0197_01.jpg +n001625/0206_02.jpg +n001627/0043_01.jpg +n001627/0091_01.jpg +n001627/0120_01.jpg +n001627/0143_01.jpg +n001627/0149_01.jpg +n001627/0176_01.jpg +n001627/0185_01.jpg +n001627/0192_01.jpg +n001627/0198_02.jpg +n001627/0208_01.jpg +n001627/0240_02.jpg +n001627/0249_02.jpg +n001627/0312_01.jpg +n001627/0341_01.jpg +n001627/0369_01.jpg +n001627/0377_03.jpg +n001627/0389_02.jpg +n001627/0402_03.jpg +n001628/0009_01.jpg +n001628/0053_01.jpg +n001628/0078_03.jpg +n001628/0095_02.jpg +n001628/0216_02.jpg 
+n001628/0285_01.jpg +n001629/0025_01.jpg +n001629/0058_01.jpg +n001629/0145_02.jpg +n001629/0189_02.jpg +n001629/0206_01.jpg +n001629/0223_01.jpg +n001629/0235_02.jpg +n001629/0260_01.jpg +n001629/0274_01.jpg +n001629/0292_01.jpg +n001629/0294_02.jpg +n001629/0314_01.jpg +n001629/0358_05.jpg +n001629/0422_01.jpg +n001629/0469_01.jpg +n001630/0318_01.jpg +n001631/0003_01.jpg +n001631/0283_01.jpg +n001631/0294_01.jpg +n001632/0021_03.jpg +n001632/0165_01.jpg +n001632/0171_02.jpg +n001632/0226_01.jpg +n001632/0227_02.jpg +n001632/0358_02.jpg +n001632/0385_01.jpg +n001632/0385_01.jpg +n001632/0435_09.jpg +n001633/0181_01.jpg +n001633/0271_02.jpg +n001633/0287_01.jpg +n001633/0315_01.jpg +n001634/0078_01.jpg +n001634/0078_01.jpg +n001634/0088_01.jpg +n001634/0133_01.jpg +n001634/0137_01.jpg +n001634/0205_01.jpg +n001634/0208_01.jpg +n001634/0370_02.jpg +n001636/0008_01.jpg +n001636/0041_01.jpg +n001636/0047_01.jpg +n001636/0056_01.jpg +n001636/0177_01.jpg +n001637/0134_01.jpg +n001637/0152_02.jpg +n001637/0154_02.jpg +n001637/0239_01.jpg +n001637/0268_01.jpg +n001637/0285_02.jpg +n001637/0324_01.jpg +n001637/0327_01.jpg +n001637/0335_01.jpg +n001637/0348_01.jpg +n001637/0354_01.jpg +n001637/0367_01.jpg +n001637/0373_01.jpg +n001637/0374_01.jpg +n001637/0374_01.jpg +n001637/0401_01.jpg +n001638/0167_01.jpg +n001638/0171_01.jpg +n001638/0174_02.jpg +n001638/0351_01.jpg +n001639/0009_01.jpg +n001639/0036_03.jpg +n001639/0114_01.jpg +n001639/0148_01.jpg +n001639/0149_01.jpg +n001639/0380_01.jpg +n001639/0387_01.jpg +n001639/0425_01.jpg +n001640/0004_01.jpg +n001640/0012_01.jpg +n001640/0049_02.jpg +n001640/0050_01.jpg +n001640/0057_01.jpg +n001641/0079_01.jpg +n001641/0143_02.jpg +n001641/0196_01.jpg +n001641/0271_02.jpg +n001641/0326_01.jpg +n001641/0358_01.jpg +n001641/0374_01.jpg +n001642/0042_01.jpg +n001642/0047_01.jpg +n001642/0099_01.jpg +n001642/0105_02.jpg +n001642/0134_01.jpg +n001642/0154_01.jpg +n001642/0227_03.jpg +n001642/0272_01.jpg +n001642/0279_01.jpg 
+n001642/0332_01.jpg +n001642/0509_02.jpg +n001642/0526_01.jpg +n001642/0596_02.jpg +n001643/0300_01.jpg +n001644/0167_01.jpg +n001644/0181_01.jpg +n001644/0223_01.jpg +n001644/0397_02.jpg +n001644/0482_02.jpg +n001645/0006_01.jpg +n001645/0014_03.jpg +n001645/0021_01.jpg +n001645/0021_02.jpg +n001645/0089_01.jpg +n001645/0152_01.jpg +n001645/0153_01.jpg +n001645/0155_01.jpg +n001645/0273_03.jpg +n001645/0299_01.jpg +n001645/0362_01.jpg +n001645/0464_01.jpg +n001645/0484_01.jpg +n001645/0488_02.jpg +n001645/0508_01.jpg +n001646/0025_01.jpg +n001646/0024_01.jpg +n001646/0055_01.jpg +n001646/0100_01.jpg +n001646/0142_01.jpg +n001646/0184_01.jpg +n001646/0209_02.jpg +n001646/0315_01.jpg +n001646/0316_02.jpg +n001646/0401_02.jpg +n001647/0143_01.jpg +n001647/0153_02.jpg +n001647/0158_01.jpg +n001647/0261_01.jpg +n001647/0269_01.jpg +n001647/0285_01.jpg +n001647/0315_01.jpg +n001647/0344_01.jpg +n001647/0372_02.jpg +n001647/0528_02.jpg +n001648/0168_01.jpg +n001648/0186_02.jpg +n001649/0113_01.jpg +n001649/0140_01.jpg +n001649/0156_01.jpg +n001649/0185_01.jpg +n001649/0187_01.jpg +n001649/0192_01.jpg +n001649/0198_02.jpg +n001649/0209_01.jpg +n001649/0294_01.jpg +n001649/0390_02.jpg +n001649/0423_01.jpg +n001651/0150_01.jpg +n001651/0302_01.jpg +n001652/0019_01.jpg +n001652/0035_02.jpg +n001652/0199_01.jpg +n001652/0235_01.jpg +n001653/0087_01.jpg +n001653/0092_01.jpg +n001653/0099_01.jpg +n001653/0100_01.jpg +n001653/0164_01.jpg +n001653/0181_01.jpg +n001653/0219_02.jpg +n001653/0225_01.jpg +n001653/0291_04.jpg +n001653/0311_01.jpg +n001653/0347_02.jpg +n001654/0023_01.jpg +n001654/0025_01.jpg +n001654/0040_01.jpg +n001654/0060_01.jpg +n001654/0071_01.jpg +n001654/0073_01.jpg +n001654/0075_01.jpg +n001654/0116_01.jpg +n001654/0118_01.jpg +n001654/0143_01.jpg +n001654/0174_01.jpg +n001654/0216_01.jpg +n001654/0233_01.jpg +n001654/0253_01.jpg +n001654/0268_05.jpg +n001654/0283_01.jpg +n001654/0288_01.jpg +n001654/0299_01.jpg +n001654/0320_01.jpg +n001654/0327_03.jpg 
+n001654/0329_01.jpg +n001654/0340_02.jpg +n001654/0348_01.jpg +n001654/0356_01.jpg +n001654/0358_01.jpg +n001654/0379_02.jpg +n001656/0041_01.jpg +n001656/0117_01.jpg +n001656/0194_02.jpg +n001656/0223_01.jpg +n001657/0084_01.jpg +n001657/0095_01.jpg +n001657/0247_01.jpg +n001657/0285_01.jpg +n001657/0344_01.jpg +n001657/0369_01.jpg +n001657/0378_01.jpg +n001657/0388_01.jpg +n001657/0506_01.jpg +n001657/0579_01.jpg +n001657/0664_01.jpg +n001658/0077_01.jpg +n001658/0193_03.jpg +n001658/0222_01.jpg +n001658/0324_02.jpg +n001659/0016_01.jpg +n001659/0018_02.jpg +n001659/0049_02.jpg +n001659/0121_01.jpg +n001659/0205_01.jpg +n001659/0207_02.jpg +n001659/0210_01.jpg +n001659/0249_03.jpg +n001659/0279_01.jpg +n001659/0340_01.jpg +n001659/0436_01.jpg +n001659/0440_01.jpg +n001660/0263_01.jpg +n001661/0085_01.jpg +n001662/0079_02.jpg +n001662/0080_01.jpg +n001662/0092_01.jpg +n001662/0126_01.jpg +n001663/0009_01.jpg +n001663/0016_01.jpg +n001663/0029_01.jpg +n001663/0048_03.jpg +n001663/0158_03.jpg +n001663/0182_02.jpg +n001663/0196_01.jpg +n001663/0225_01.jpg +n001663/0245_01.jpg +n001663/0256_02.jpg +n001663/0258_01.jpg +n001663/0274_01.jpg +n001663/0277_01.jpg +n001663/0312_03.jpg +n001663/0434_01.jpg +n001664/0007_01.jpg +n001664/0020_01.jpg +n001664/0023_02.jpg +n001664/0049_02.jpg +n001664/0054_02.jpg +n001664/0058_02.jpg +n001664/0060_02.jpg +n001664/0088_03.jpg +n001664/0132_03.jpg +n001664/0154_02.jpg +n001664/0168_02.jpg +n001664/0174_01.jpg +n001664/0191_01.jpg +n001664/0205_02.jpg +n001664/0214_02.jpg +n001664/0220_02.jpg +n001664/0227_01.jpg +n001664/0241_01.jpg +n001664/0263_01.jpg +n001664/0280_01.jpg +n001664/0301_01.jpg +n001664/0314_02.jpg +n001664/0324_01.jpg +n001665/0134_02.jpg +n001665/0244_03.jpg +n001665/0339_01.jpg +n001665/0361_01.jpg +n001665/0376_01.jpg +n001665/0399_02.jpg +n001666/0014_01.jpg +n001666/0093_01.jpg +n001666/0113_03.jpg +n001666/0150_01.jpg +n001666/0165_02.jpg +n001666/0203_02.jpg +n001666/0204_01.jpg +n001666/0209_02.jpg 
+n001666/0311_01.jpg +n001666/0315_01.jpg +n001666/0355_04.jpg +n001666/0393_07.jpg +n001666/0418_01.jpg +n001667/0012_03.jpg +n001667/0104_01.jpg +n001667/0139_01.jpg +n001667/0144_01.jpg +n001667/0152_01.jpg +n001668/0067_01.jpg +n001668/0129_01.jpg +n001668/0143_03.jpg +n001668/0194_01.jpg +n001668/0215_01.jpg +n001668/0323_02.jpg +n001668/0388_01.jpg +n001670/0005_01.jpg +n001670/0009_01.jpg +n001670/0035_01.jpg +n001670/0121_01.jpg +n001670/0218_01.jpg +n001670/0248_01.jpg +n001670/0259_01.jpg +n001670/0296_01.jpg +n001671/0030_02.jpg +n001671/0032_01.jpg +n001671/0057_01.jpg +n001671/0061_01.jpg +n001671/0086_01.jpg +n001671/0110_02.jpg +n001671/0127_02.jpg +n001671/0141_01.jpg +n001671/0167_01.jpg +n001671/0170_01.jpg +n001671/0174_02.jpg +n001671/0193_01.jpg +n001671/0204_01.jpg +n001671/0215_01.jpg +n001671/0298_01.jpg +n001671/0307_01.jpg +n001671/0315_01.jpg +n001671/0329_01.jpg +n001671/0338_01.jpg +n001671/0343_01.jpg +n001673/0006_02.jpg +n001673/0037_02.jpg +n001673/0163_01.jpg +n001673/0179_02.jpg +n001673/0198_01.jpg +n001673/0221_01.jpg +n001673/0300_01.jpg +n001673/0325_01.jpg +n001673/0356_05.jpg +n001673/0384_01.jpg +n001673/0427_01.jpg +n001673/0431_01.jpg +n001674/0026_01.jpg +n001674/0042_01.jpg +n001674/0060_01.jpg +n001674/0062_01.jpg +n001674/0084_01.jpg +n001674/0121_01.jpg +n001674/0123_02.jpg +n001674/0152_02.jpg +n001674/0163_03.jpg +n001674/0217_01.jpg +n001674/0228_01.jpg +n001674/0323_01.jpg +n001674/0375_01.jpg +n001675/0153_01.jpg +n001675/0254_02.jpg +n001675/0260_02.jpg +n001675/0282_03.jpg +n001675/0310_01.jpg +n001675/0348_01.jpg +n001675/0360_01.jpg +n001676/0002_01.jpg +n001676/0027_01.jpg +n001676/0086_03.jpg +n001676/0143_01.jpg +n001676/0206_03.jpg +n001676/0213_01.jpg +n001677/0074_02.jpg +n001677/0203_02.jpg +n001677/0223_01.jpg +n001677/0252_01.jpg +n001677/0276_02.jpg +n001677/0286_02.jpg +n001677/0289_01.jpg +n001677/0299_01.jpg +n001677/0345_01.jpg +n001677/0408_01.jpg +n001679/0077_01.jpg +n001679/0097_01.jpg 
+n001679/0103_01.jpg +n001679/0153_01.jpg +n001679/0204_02.jpg +n001680/0002_01.jpg +n001680/0007_05.jpg +n001680/0066_02.jpg +n001680/0073_01.jpg +n001680/0117_01.jpg +n001680/0122_01.jpg +n001680/0120_03.jpg +n001680/0124_01.jpg +n001680/0215_01.jpg +n001680/0265_01.jpg +n001680/0267_01.jpg +n001680/0292_01.jpg +n001680/0334_01.jpg +n001680/0354_01.jpg +n001680/0380_01.jpg +n001680/0529_02.jpg +n001680/0541_01.jpg +n001681/0301_02.jpg +n001681/0303_01.jpg +n001681/0418_01.jpg +n001682/0081_01.jpg +n001682/0267_02.jpg +n001682/0292_01.jpg +n001682/0318_01.jpg +n001682/0332_01.jpg +n001682/0418_04.jpg +n001684/0076_03.jpg +n001684/0097_02.jpg +n001684/0152_01.jpg +n001684/0157_01.jpg +n001684/0165_01.jpg +n001684/0309_01.jpg +n001684/0323_01.jpg +n001684/0396_02.jpg +n001684/0448_01.jpg +n001684/0453_01.jpg +n001685/0129_01.jpg +n001685/0131_01.jpg +n001686/0010_01.jpg +n001686/0138_02.jpg +n001686/0189_01.jpg +n001686/0259_01.jpg +n001686/0293_01.jpg +n001686/0336_01.jpg +n001686/0347_01.jpg +n001688/0012_01.jpg +n001688/0024_01.jpg +n001688/0064_01.jpg +n001688/0105_01.jpg +n001688/0197_03.jpg +n001688/0213_01.jpg +n001688/0327_01.jpg +n001688/0332_02.jpg +n001688/0343_01.jpg +n001688/0372_01.jpg +n001688/0377_01.jpg +n001688/0380_01.jpg +n001689/0120_01.jpg +n001689/0203_01.jpg +n001689/0222_01.jpg +n001689/0223_01.jpg +n001690/0023_01.jpg +n001690/0169_05.jpg +n001690/0176_01.jpg +n001690/0243_01.jpg +n001690/0245_01.jpg +n001690/0319_02.jpg +n001690/0350_01.jpg +n001691/0044_02.jpg +n001691/0096_02.jpg +n001691/0107_02.jpg +n001691/0173_02.jpg +n001691/0216_01.jpg +n001691/0265_02.jpg +n001691/0278_01.jpg +n001692/0008_02.jpg +n001692/0159_04.jpg +n001692/0375_01.jpg +n001693/0133_01.jpg +n001693/0185_01.jpg +n001693/0288_01.jpg +n001693/0338_02.jpg +n001693/0408_02.jpg +n001693/0488_03.jpg +n001693/0506_01.jpg +n001694/0005_01.jpg +n001694/0015_01.jpg +n001694/0029_01.jpg +n001694/0041_01.jpg +n001694/0054_01.jpg +n001694/0074_02.jpg +n001694/0085_01.jpg 
+n001694/0091_01.jpg +n001694/0127_01.jpg +n001694/0145_02.jpg +n001694/0152_01.jpg +n001694/0192_01.jpg +n001694/0195_01.jpg +n001694/0203_01.jpg +n001694/0206_02.jpg +n001694/0220_02.jpg +n001694/0256_01.jpg +n001694/0284_02.jpg +n001694/0356_01.jpg +n001694/0358_01.jpg +n001694/0404_01.jpg +n001695/0047_01.jpg +n001695/0049_01.jpg +n001695/0059_01.jpg +n001695/0069_02.jpg +n001695/0103_01.jpg +n001695/0207_02.jpg +n001695/0251_01.jpg +n001695/0296_03.jpg +n001695/0304_01.jpg +n001695/0432_01.jpg +n001696/0320_02.jpg +n001696/0339_01.jpg +n001697/0250_02.jpg +n001697/0269_01.jpg +n001697/0326_01.jpg +n001697/0323_01.jpg +n001697/0422_02.jpg +n001697/0431_01.jpg +n001698/0021_03.jpg +n001698/0023_01.jpg +n001698/0044_01.jpg +n001698/0046_02.jpg +n001698/0063_04.jpg +n001698/0147_02.jpg +n001698/0156_01.jpg +n001698/0158_01.jpg +n001698/0160_01.jpg +n001698/0163_01.jpg +n001698/0166_01.jpg +n001698/0167_01.jpg +n001698/0209_01.jpg +n001698/0221_02.jpg +n001698/0226_01.jpg +n001698/0293_01.jpg +n001698/0308_02.jpg +n001698/0318_01.jpg +n001698/0320_01.jpg +n001698/0323_05.jpg +n001698/0356_02.jpg +n001698/0367_01.jpg +n001699/0060_01.jpg +n001699/0076_02.jpg +n001699/0097_02.jpg +n001699/0099_01.jpg +n001699/0108_02.jpg +n001699/0187_01.jpg +n001699/0221_01.jpg +n001699/0233_01.jpg +n001699/0265_02.jpg +n001699/0285_01.jpg +n001700/0013_01.jpg +n001700/0053_01.jpg +n001700/0055_01.jpg +n001700/0057_01.jpg +n001700/0132_01.jpg +n001700/0242_05.jpg +n001700/0332_01.jpg +n001700/0613_02.jpg +n001701/0217_01.jpg +n001701/0307_01.jpg +n001701/0298_01.jpg +n001701/0345_01.jpg +n001701/0407_01.jpg +n001701/0409_01.jpg +n001701/0425_01.jpg +n001702/0114_01.jpg +n001702/0137_01.jpg +n001702/0141_01.jpg +n001702/0169_01.jpg +n001702/0175_01.jpg +n001702/0185_02.jpg +n001702/0264_01.jpg +n001702/0271_01.jpg +n001702/0301_01.jpg +n001703/0003_01.jpg +n001703/0013_01.jpg +n001703/0245_02.jpg +n001703/0254_01.jpg +n001703/0261_02.jpg +n001703/0278_01.jpg +n001703/0394_01.jpg 
+n001703/0459_01.jpg +n001704/0224_02.jpg +n001704/0326_04.jpg +n001704/0341_01.jpg +n001704/0343_01.jpg +n001705/0051_02.jpg +n001705/0052_02.jpg +n001705/0058_01.jpg +n001705/0083_01.jpg +n001705/0090_01.jpg +n001705/0105_02.jpg +n001705/0129_01.jpg +n001705/0133_01.jpg +n001705/0135_01.jpg +n001705/0137_02.jpg +n001705/0156_01.jpg +n001705/0169_02.jpg +n001705/0175_02.jpg +n001705/0175_03.jpg +n001705/0182_04.jpg +n001705/0197_01.jpg +n001705/0200_03.jpg +n001705/0212_02.jpg +n001705/0222_01.jpg +n001705/0225_03.jpg +n001705/0237_01.jpg +n001705/0239_01.jpg +n001705/0251_01.jpg +n001705/0313_03.jpg +n001705/0278_01.jpg +n001705/0312_01.jpg +n001705/0240_01.jpg +n001705/0319_01.jpg +n001705/0333_03.jpg +n001705/0337_01.jpg +n001705/0362_01.jpg +n001706/0036_01.jpg +n001706/0039_01.jpg +n001706/0088_01.jpg +n001706/0220_01.jpg +n001706/0266_01.jpg +n001706/0302_01.jpg +n001706/0339_01.jpg +n001706/0409_01.jpg +n001706/0461_01.jpg +n001707/0232_01.jpg +n001707/0277_01.jpg +n001707/0280_01.jpg +n001707/0283_01.jpg +n001707/0302_01.jpg +n001707/0343_01.jpg +n001709/0129_01.jpg +n001709/0194_02.jpg +n001709/0317_01.jpg +n001709/0360_01.jpg +n001711/0056_01.jpg +n001711/0083_02.jpg +n001711/0171_01.jpg +n001711/0250_01.jpg +n001711/0348_01.jpg +n001711/0367_02.jpg +n001711/0381_01.jpg +n001712/0005_01.jpg +n001712/0013_01.jpg +n001712/0013_02.jpg +n001712/0043_03.jpg +n001712/0087_01.jpg +n001712/0123_04.jpg +n001712/0133_03.jpg +n001712/0135_01.jpg +n001712/0167_02.jpg +n001712/0177_05.jpg +n001712/0180_03.jpg +n001712/0183_01.jpg +n001712/0221_01.jpg +n001712/0231_01.jpg +n001712/0237_01.jpg +n001712/0294_03.jpg +n001712/0320_02.jpg +n001712/0334_01.jpg +n001712/0338_03.jpg +n001712/0348_01.jpg +n001712/0356_01.jpg +n001712/0383_02.jpg +n001712/0412_02.jpg +n001712/0457_01.jpg +n001713/0080_02.jpg +n001713/0088_01.jpg +n001713/0122_01.jpg +n001713/0145_02.jpg +n001713/0203_01.jpg +n001713/0209_01.jpg +n001713/0240_01.jpg +n001713/0283_01.jpg +n001713/0302_01.jpg 
+n001713/0342_01.jpg +n001714/0076_01.jpg +n001714/0132_01.jpg +n001714/0149_01.jpg +n001714/0328_01.jpg +n001714/0327_01.jpg +n001714/0367_01.jpg +n001715/0097_01.jpg +n001715/0110_01.jpg +n001715/0124_02.jpg +n001715/0130_01.jpg +n001715/0157_01.jpg +n001715/0188_01.jpg +n001715/0229_01.jpg +n001715/0230_04.jpg +n001715/0247_02.jpg +n001715/0251_02.jpg +n001715/0257_01.jpg +n001715/0277_01.jpg +n001715/0288_02.jpg +n001715/0305_01.jpg +n001715/0325_01.jpg +n001715/0326_01.jpg +n001715/0327_02.jpg +n001715/0337_01.jpg +n001715/0343_01.jpg +n001715/0350_01.jpg +n001715/0353_01.jpg +n001715/0356_01.jpg +n001715/0357_01.jpg +n001715/0358_01.jpg +n001715/0372_03.jpg +n001715/0373_01.jpg +n001715/0434_01.jpg +n001716/0005_01.jpg +n001716/0084_01.jpg +n001716/0107_01.jpg +n001716/0151_02.jpg +n001716/0164_02.jpg +n001716/0209_02.jpg +n001716/0270_01.jpg +n001716/0278_01.jpg +n001716/0336_01.jpg +n001716/0356_01.jpg +n001716/0397_01.jpg +n001716/0421_02.jpg +n001717/0062_01.jpg +n001717/0103_01.jpg +n001717/0323_01.jpg +n001717/0339_01.jpg +n001717/0341_01.jpg +n001717/0378_01.jpg +n001717/0367_01.jpg +n001718/0004_01.jpg +n001718/0066_02.jpg +n001718/0098_02.jpg +n001718/0111_02.jpg +n001718/0191_01.jpg +n001718/0211_01.jpg +n001718/0214_01.jpg +n001718/0216_02.jpg +n001718/0238_01.jpg +n001718/0268_02.jpg +n001719/0019_01.jpg +n001719/0131_01.jpg +n001719/0181_01.jpg +n001719/0184_01.jpg +n001719/0211_03.jpg +n001719/0212_01.jpg +n001719/0245_02.jpg +n001720/0155_02.jpg +n001720/0238_01.jpg +n001720/0247_01.jpg +n001720/0284_01.jpg +n001720/0311_01.jpg +n001720/0381_02.jpg +n001720/0384_01.jpg +n001720/0432_01.jpg +n001720/0485_02.jpg +n001721/0031_01.jpg +n001721/0055_01.jpg +n001721/0072_01.jpg +n001721/0075_01.jpg +n001721/0098_02.jpg +n001721/0155_01.jpg +n001721/0155_05.jpg +n001721/0187_03.jpg +n001721/0219_01.jpg +n001721/0258_01.jpg +n001721/0266_01.jpg +n001721/0322_01.jpg +n001722/0133_02.jpg +n001722/0230_03.jpg +n001722/0267_01.jpg +n001722/0278_01.jpg 
+n001723/0117_02.jpg +n001723/0140_02.jpg +n001724/0016_02.jpg +n001724/0245_02.jpg +n001724/0249_01.jpg +n001724/0251_02.jpg +n001724/0283_03.jpg +n001724/0288_01.jpg +n001724/0291_01.jpg +n001724/0293_01.jpg +n001724/0297_01.jpg +n001724/0298_01.jpg +n001724/0301_01.jpg +n001724/0304_02.jpg +n001724/0306_01.jpg +n001724/0307_01.jpg +n001724/0310_01.jpg +n001724/0314_01.jpg +n001724/0315_01.jpg +n001724/0316_01.jpg +n001724/0317_01.jpg +n001724/0320_01.jpg +n001724/0321_01.jpg +n001724/0328_01.jpg +n001724/0338_01.jpg +n001724/0344_01.jpg +n001724/0347_01.jpg +n001724/0428_01.jpg +n001724/0393_01.jpg +n001724/0377_01.jpg +n001724/0375_01.jpg +n001724/0373_02.jpg +n001724/0372_01.jpg +n001724/0356_01.jpg +n001724/0348_01.jpg +n001725/0027_02.jpg +n001725/0087_01.jpg +n001725/0115_01.jpg +n001725/0121_02.jpg +n001725/0121_02.jpg +n001725/0179_02.jpg +n001725/0206_01.jpg +n001725/0211_02.jpg +n001725/0234_01.jpg +n001725/0254_02.jpg +n001725/0257_01.jpg +n001725/0258_02.jpg +n001725/0335_01.jpg +n001725/0346_01.jpg +n001726/0003_01.jpg +n001726/0156_02.jpg +n001726/0159_02.jpg +n001726/0161_02.jpg +n001726/0160_01.jpg +n001726/0169_01.jpg +n001726/0169_02.jpg +n001726/0192_01.jpg +n001726/0202_02.jpg +n001726/0205_02.jpg +n001726/0271_01.jpg +n001726/0315_02.jpg +n001726/0330_01.jpg +n001726/0343_02.jpg +n001726/0354_01.jpg +n001726/0380_01.jpg +n001727/0008_01.jpg +n001727/0214_01.jpg +n001727/0217_01.jpg +n001727/0231_01.jpg +n001727/0307_02.jpg +n001727/0317_02.jpg +n001727/0324_01.jpg +n001727/0374_01.jpg +n001727/0467_02.jpg +n001727/0484_02.jpg +n001727/0537_01.jpg +n001727/0597_01.jpg +n001728/0260_01.jpg +n001728/0310_01.jpg +n001728/0343_01.jpg +n001728/0364_01.jpg +n001728/0372_01.jpg +n001728/0390_01.jpg +n001728/0464_02.jpg +n001728/0474_01.jpg +n001728/0501_01.jpg +n001728/0505_01.jpg +n001728/0560_01.jpg +n001728/0575_01.jpg +n001728/0580_03.jpg +n001729/0059_01.jpg +n001729/0152_01.jpg +n001729/0179_01.jpg +n001729/0196_01.jpg +n001729/0301_02.jpg 
+n001729/0381_01.jpg +n001729/0384_01.jpg +n001730/0201_01.jpg +n001730/0222_01.jpg +n001730/0234_01.jpg +n001730/0247_02.jpg +n001731/0041_01.jpg +n001731/0175_01.jpg +n001731/0185_01.jpg +n001731/0201_01.jpg +n001731/0219_02.jpg +n001731/0265_02.jpg +n001731/0277_01.jpg +n001731/0283_01.jpg +n001731/0289_01.jpg +n001731/0308_01.jpg +n001731/0311_02.jpg +n001731/0338_01.jpg +n001731/0387_01.jpg +n001731/0419_02.jpg +n001731/0424_01.jpg +n001732/0106_01.jpg +n001733/0007_02.jpg +n001733/0008_01.jpg +n001733/0048_01.jpg +n001733/0075_01.jpg +n001733/0077_01.jpg +n001733/0093_01.jpg +n001733/0130_01.jpg +n001733/0149_01.jpg +n001733/0153_07.jpg +n001733/0170_02.jpg +n001733/0170_04.jpg +n001733/0175_01.jpg +n001733/0179_01.jpg +n001733/0193_01.jpg +n001733/0205_01.jpg +n001733/0213_02.jpg +n001733/0236_01.jpg +n001733/0263_01.jpg +n001733/0271_01.jpg +n001733/0303_01.jpg +n001733/0308_02.jpg +n001733/0329_01.jpg +n001733/0335_01.jpg +n001733/0339_01.jpg +n001733/0344_02.jpg +n001733/0372_01.jpg +n001733/0386_01.jpg +n001733/0394_03.jpg +n001734/0024_01.jpg +n001734/0373_01.jpg +n001734/0463_01.jpg +n001735/0001_01.jpg +n001735/0022_01.jpg +n001735/0026_03.jpg +n001735/0031_01.jpg +n001735/0074_01.jpg +n001735/0097_01.jpg +n001735/0102_01.jpg +n001735/0119_01.jpg +n001735/0122_02.jpg +n001735/0125_01.jpg +n001735/0131_02.jpg +n001735/0237_01.jpg +n001735/0244_01.jpg +n001735/0246_01.jpg +n001735/0274_02.jpg +n001735/0383_02.jpg +n001735/0401_01.jpg +n001736/0104_02.jpg +n001737/0034_02.jpg +n001737/0043_01.jpg +n001737/0094_01.jpg +n001737/0110_01.jpg +n001738/0034_01.jpg +n001738/0101_01.jpg +n001738/0148_01.jpg +n001738/0155_01.jpg +n001738/0171_03.jpg +n001738/0198_01.jpg +n001738/0231_01.jpg +n001738/0248_03.jpg +n001738/0331_01.jpg +n001738/0334_01.jpg +n001739/0079_01.jpg +n001739/0213_01.jpg +n001740/0122_02.jpg +n001740/0133_01.jpg +n001740/0136_01.jpg +n001740/0271_01.jpg +n001741/0006_01.jpg +n001741/0033_01.jpg +n001741/0058_02.jpg +n001741/0090_01.jpg 
+n001741/0218_01.jpg +n001741/0225_01.jpg +n001741/0229_01.jpg +n001741/0240_01.jpg +n001741/0250_02.jpg +n001741/0289_01.jpg +n001741/0361_01.jpg +n001742/0134_01.jpg +n001742/0238_01.jpg +n001742/0276_01.jpg +n001743/0029_04.jpg +n001744/0038_01.jpg +n001744/0104_02.jpg +n001744/0128_02.jpg +n001744/0141_03.jpg +n001744/0169_02.jpg +n001744/0273_01.jpg +n001744/0313_02.jpg +n001744/0327_02.jpg +n001744/0349_02.jpg +n001744/0394_01.jpg +n001744/0400_01.jpg +n001745/0040_01.jpg +n001745/0090_03.jpg +n001745/0091_02.jpg +n001745/0111_01.jpg +n001745/0126_01.jpg +n001745/0149_02.jpg +n001745/0290_01.jpg +n001746/0157_01.jpg +n001746/0234_01.jpg +n001747/0083_01.jpg +n001747/0216_01.jpg +n001747/0332_01.jpg +n001747/0346_05.jpg +n001747/0353_01.jpg +n001747/0362_01.jpg +n001747/0368_01.jpg +n001747/0408_01.jpg +n001747/0428_01.jpg +n001747/0452_01.jpg +n001747/0477_01.jpg +n001748/0071_01.jpg +n001748/0074_01.jpg +n001748/0132_01.jpg +n001748/0200_01.jpg +n001748/0248_02.jpg +n001748/0264_01.jpg +n001748/0277_01.jpg +n001748/0286_01.jpg +n001748/0289_01.jpg +n001748/0334_01.jpg +n001748/0336_01.jpg +n001748/0345_01.jpg +n001748/0430_01.jpg +n001748/0432_02.jpg +n001748/0452_01.jpg +n001748/0486_01.jpg +n001749/0030_01.jpg +n001749/0252_01.jpg +n001749/0335_02.jpg +n001749/0339_01.jpg +n001749/0406_01.jpg +n001749/0446_01.jpg +n001749/0512_02.jpg +n001750/0152_01.jpg +n001750/0162_02.jpg +n001750/0176_01.jpg +n001750/0216_01.jpg +n001750/0353_01.jpg +n001750/0388_02.jpg +n001751/0115_01.jpg +n001751/0299_01.jpg +n001752/0007_01.jpg +n001752/0121_01.jpg +n001752/0187_02.jpg +n001752/0254_02.jpg +n001753/0049_02.jpg +n001753/0073_01.jpg +n001753/0250_01.jpg +n001753/0284_03.jpg +n001753/0316_02.jpg +n001753/0404_04.jpg +n001753/0461_01.jpg +n001754/0079_02.jpg +n001754/0179_01.jpg +n001754/0214_02.jpg +n001754/0245_01.jpg +n001754/0325_01.jpg +n001754/0329_01.jpg +n001754/0336_01.jpg +n001755/0030_01.jpg +n001755/0105_01.jpg +n001756/0026_01.jpg +n001756/0166_02.jpg 
+n001756/0288_02.jpg +n001757/0017_01.jpg +n001757/0157_01.jpg +n001757/0367_01.jpg +n001758/0033_02.jpg +n001758/0040_01.jpg +n001758/0140_03.jpg +n001758/0303_07.jpg +n001758/0311_01.jpg +n001758/0311_02.jpg +n001758/0385_02.jpg +n001758/0388_03.jpg +n001758/0513_03.jpg +n001759/0070_01.jpg +n001759/0140_01.jpg +n001759/0157_01.jpg +n001759/0261_02.jpg +n001759/0272_01.jpg +n001759/0279_01.jpg +n001759/0307_03.jpg +n001759/0308_02.jpg +n001759/0370_01.jpg +n001759/0412_01.jpg +n001759/0517_02.jpg +n001759/0671_02.jpg +n001759/0696_01.jpg +n001760/0006_02.jpg +n001760/0010_01.jpg +n001760/0019_05.jpg +n001760/0033_02.jpg +n001760/0034_04.jpg +n001760/0063_02.jpg +n001760/0069_02.jpg +n001760/0076_02.jpg +n001760/0080_04.jpg +n001760/0111_01.jpg +n001760/0143_01.jpg +n001760/0163_03.jpg +n001760/0315_01.jpg +n001760/0322_01.jpg +n001760/0330_01.jpg +n001760/0343_01.jpg +n001760/0380_01.jpg +n001761/0005_02.jpg +n001761/0025_01.jpg +n001761/0115_01.jpg +n001761/0209_01.jpg +n001761/0252_01.jpg +n001761/0676_01.jpg +n001761/0713_02.jpg +n001762/0069_01.jpg +n001762/0070_01.jpg +n001762/0272_01.jpg +n001762/0276_01.jpg +n001762/0335_01.jpg +n001763/0099_01.jpg +n001763/0126_01.jpg +n001763/0191_01.jpg +n001763/0479_01.jpg +n001763/0480_02.jpg +n001764/0219_02.jpg +n001764/0428_01.jpg +n001765/0010_01.jpg +n001765/0178_01.jpg +n001765/0259_01.jpg +n001765/0575_02.jpg +n001765/0582_01.jpg +n001766/0008_02.jpg +n001766/0040_01.jpg +n001766/0169_02.jpg +n001766/0213_01.jpg +n001766/0287_01.jpg +n001766/0323_03.jpg +n001767/0048_01.jpg +n001767/0249_01.jpg +n001767/0543_02.jpg +n001768/0023_03.jpg +n001768/0055_01.jpg +n001768/0082_02.jpg +n001768/0152_01.jpg +n001768/0212_01.jpg +n001768/0286_01.jpg +n001768/0463_01.jpg +n001768/0466_01.jpg +n001768/0518_02.jpg +n001768/0567_02.jpg +n001768/0610_01.jpg +n001769/0041_01.jpg +n001769/0098_01.jpg +n001769/0161_02.jpg +n001769/0186_01.jpg +n001769/0195_02.jpg +n001769/0244_01.jpg +n001769/0322_01.jpg +n001769/0394_01.jpg 
+n001770/0046_01.jpg +n001770/0115_01.jpg +n001770/0215_03.jpg +n001770/0215_06.jpg +n001770/0293_04.jpg +n001770/0305_02.jpg +n001770/0312_01.jpg +n001770/0318_01.jpg +n001771/0101_01.jpg +n001771/0108_01.jpg +n001771/0208_02.jpg +n001771/0226_01.jpg +n001771/0232_02.jpg +n001771/0257_01.jpg +n001771/0257_02.jpg +n001771/0260_01.jpg +n001771/0260_02.jpg +n001771/0289_01.jpg +n001771/0299_02.jpg +n001771/0318_02.jpg +n001771/0337_01.jpg +n001771/0389_01.jpg +n001771/0455_02.jpg +n001771/0456_02.jpg +n001771/0463_01.jpg +n001771/0465_01.jpg +n001771/0493_02.jpg +n001771/0499_01.jpg +n001771/0579_02.jpg +n001771/0583_01.jpg +n001771/0585_01.jpg +n001771/0596_01.jpg +n001771/0686_02.jpg +n001771/0695_01.jpg +n001771/0704_01.jpg +n001772/0038_01.jpg +n001772/0329_01.jpg +n001772/0391_01.jpg +n001772/0402_01.jpg +n001773/0019_01.jpg +n001773/0026_03.jpg +n001773/0029_04.jpg +n001773/0075_01.jpg +n001773/0089_01.jpg +n001773/0161_01.jpg +n001773/0238_02.jpg +n001773/0306_01.jpg +n001773/0337_01.jpg +n001773/0531_01.jpg +n001773/0604_01.jpg +n001773/0631_01.jpg +n001773/0643_02.jpg +n001773/0646_02.jpg +n001774/0055_02.jpg +n001774/0103_01.jpg +n001774/0110_01.jpg +n001774/0176_01.jpg +n001774/0208_01.jpg +n001774/0267_02.jpg +n001774/0274_01.jpg +n001774/0296_01.jpg +n001774/0316_01.jpg +n001774/0318_01.jpg +n001775/0004_01.jpg +n001775/0030_01.jpg +n001775/0047_02.jpg +n001775/0048_01.jpg +n001775/0050_01.jpg +n001775/0058_01.jpg +n001775/0080_02.jpg +n001775/0219_01.jpg +n001775/0220_02.jpg +n001775/0236_02.jpg +n001775/0264_02.jpg +n001775/0324_01.jpg +n001775/0345_01.jpg +n001775/0522_02.jpg +n001775/0526_01.jpg +n001775/0660_03.jpg +n001775/0672_01.jpg +n001776/0103_02.jpg +n001776/0210_01.jpg +n001776/0263_02.jpg +n001776/0288_01.jpg +n001777/0060_01.jpg +n001777/0141_01.jpg +n001777/0150_01.jpg +n001778/0005_01.jpg +n001778/0165_01.jpg +n001778/0280_01.jpg +n001778/0342_01.jpg +n001778/0346_01.jpg +n001780/0043_01.jpg +n001780/0332_01.jpg +n001780/0447_02.jpg 
+n001780/0455_01.jpg +n001780/0475_01.jpg +n001782/0008_01.jpg +n001782/0111_01.jpg +n001782/0312_02.jpg +n001782/0342_01.jpg +n001782/0342_02.jpg +n001782/0403_02.jpg +n001783/0138_01.jpg +n001783/0175_01.jpg +n001783/0229_02.jpg +n001783/0235_01.jpg +n001783/0246_01.jpg +n001783/0321_02.jpg +n001783/0341_02.jpg +n001783/0448_03.jpg +n001784/0166_01.jpg +n001784/0212_02.jpg +n001784/0231_02.jpg +n001785/0293_02.jpg +n001785/0309_01.jpg +n001785/0414_01.jpg +n001785/0419_01.jpg +n001786/0291_01.jpg +n001788/0034_02.jpg +n001788/0081_02.jpg +n001788/0117_01.jpg +n001788/0481_01.jpg +n001789/0131_01.jpg +n001789/0133_01.jpg +n001789/0138_01.jpg +n001789/0385_01.jpg +n001790/0053_01.jpg +n001790/0060_01.jpg +n001790/0063_02.jpg +n001790/0066_01.jpg +n001790/0120_01.jpg +n001790/0142_01.jpg +n001790/0157_01.jpg +n001790/0227_01.jpg +n001790/0238_01.jpg +n001790/0238_02.jpg +n001790/0242_01.jpg +n001790/0252_02.jpg +n001790/0271_01.jpg +n001790/0302_01.jpg +n001790/0337_03.jpg +n001790/0464_01.jpg +n001791/0133_01.jpg +n001791/0460_01.jpg +n001792/0047_01.jpg +n001792/0055_01.jpg +n001792/0127_01.jpg +n001792/0187_01.jpg +n001792/0229_02.jpg +n001792/0260_01.jpg +n001792/0262_02.jpg +n001793/0004_02.jpg +n001793/0073_01.jpg +n001793/0090_01.jpg +n001793/0094_01.jpg +n001793/0103_02.jpg +n001793/0107_01.jpg +n001793/0114_01.jpg +n001793/0118_02.jpg +n001793/0130_01.jpg +n001793/0150_01.jpg +n001793/0165_01.jpg +n001793/0168_01.jpg +n001793/0187_02.jpg +n001793/0188_01.jpg +n001793/0201_01.jpg +n001793/0220_01.jpg +n001793/0248_02.jpg +n001793/0264_01.jpg +n001793/0307_02.jpg +n001794/0035_01.jpg +n001794/0119_01.jpg +n001794/0165_01.jpg +n001794/0238_01.jpg +n001794/0268_02.jpg +n001794/0334_01.jpg +n001794/0346_01.jpg +n001794/0390_01.jpg +n001794/0430_01.jpg +n001794/0442_02.jpg +n001795/0015_01.jpg +n001795/0016_02.jpg +n001795/0092_02.jpg +n001795/0198_01.jpg +n001795/0241_01.jpg +n001795/0341_02.jpg +n001796/0312_01.jpg +n001796/0324_01.jpg +n001796/0329_01.jpg 
+n001797/0097_02.jpg +n001797/0188_01.jpg +n001797/0449_01.jpg +n001797/0452_01.jpg +n001798/0181_02.jpg +n001799/0116_01.jpg +n001799/0202_02.jpg +n001799/0271_01.jpg +n001799/0263_01.jpg +n001799/0265_02.jpg +n001799/0379_01.jpg +n001799/0383_01.jpg +n001799/0385_01.jpg +n001800/0001_01.jpg +n001800/0004_02.jpg +n001800/0058_02.jpg +n001800/0184_01.jpg +n001801/0006_01.jpg +n001802/0404_02.jpg +n001803/0009_01.jpg +n001803/0121_02.jpg +n001803/0155_03.jpg +n001803/0159_01.jpg +n001803/0247_01.jpg +n001803/0247_02.jpg +n001803/0280_01.jpg +n001803/0280_02.jpg +n001803/0289_01.jpg +n001803/0310_01.jpg +n001803/0463_01.jpg +n001803/0515_02.jpg +n001803/0533_05.jpg +n001804/0373_01.jpg +n001805/0028_01.jpg +n001805/0055_01.jpg +n001805/0061_01.jpg +n001805/0067_01.jpg +n001805/0177_02.jpg +n001805/0261_01.jpg +n001805/0263_01.jpg +n001805/0295_01.jpg +n001805/0407_01.jpg +n001805/0420_02.jpg +n001805/0438_03.jpg +n001805/0466_01.jpg +n001806/0530_04.jpg +n001807/0020_01.jpg +n001807/0166_01.jpg +n001808/0018_01.jpg +n001808/0055_01.jpg +n001808/0087_01.jpg +n001808/0121_01.jpg +n001808/0133_01.jpg +n001808/0182_01.jpg +n001808/0184_02.jpg +n001808/0222_02.jpg +n001808/0245_03.jpg +n001808/0357_02.jpg +n001808/0417_02.jpg +n001809/0004_01.jpg +n001809/0038_01.jpg +n001809/0090_01.jpg +n001809/0096_01.jpg +n001809/0122_01.jpg +n001809/0142_01.jpg +n001809/0232_02.jpg +n001809/0275_01.jpg +n001809/0294_02.jpg +n001810/0183_01.jpg +n001810/0214_01.jpg +n001810/0260_04.jpg +n001812/0029_03.jpg +n001812/0043_01.jpg +n001812/0076_02.jpg +n001812/0101_01.jpg +n001812/0231_02.jpg +n001812/0242_02.jpg +n001812/0344_01.jpg +n001813/0002_01.jpg +n001813/0072_01.jpg +n001813/0090_02.jpg +n001813/0105_01.jpg +n001813/0200_01.jpg +n001813/0220_01.jpg +n001813/0225_02.jpg +n001813/0226_01.jpg +n001813/0237_01.jpg +n001813/0265_01.jpg +n001813/0273_01.jpg +n001813/0281_01.jpg +n001813/0284_01.jpg +n001813/0346_01.jpg +n001813/0348_02.jpg +n001813/0352_01.jpg +n001813/0366_01.jpg 
+n001813/0375_01.jpg +n001813/0386_01.jpg +n001814/0139_01.jpg +n001814/0161_03.jpg +n001814/0173_01.jpg +n001814/0280_01.jpg +n001815/0178_01.jpg +n001815/0240_01.jpg +n001815/0251_01.jpg +n001815/0308_01.jpg +n001818/0014_01.jpg +n001818/0100_01.jpg +n001818/0139_01.jpg +n001820/0036_02.jpg +n001820/0059_02.jpg +n001820/0079_05.jpg +n001820/0101_01.jpg +n001820/0136_01.jpg +n001820/0148_01.jpg +n001820/0185_02.jpg +n001820/0193_01.jpg +n001820/0335_02.jpg +n001821/0080_01.jpg +n001821/0174_03.jpg +n001822/0041_01.jpg +n001822/0044_02.jpg +n001822/0380_02.jpg +n001823/0413_01.jpg +n001823/0422_01.jpg +n001823/0464_03.jpg +n001824/0007_01.jpg +n001824/0319_01.jpg +n001825/0042_01.jpg +n001825/0114_01.jpg +n001825/0190_01.jpg +n001825/0191_01.jpg +n001825/0210_02.jpg +n001825/0269_01.jpg +n001825/0269_02.jpg +n001826/0049_01.jpg +n001826/0099_01.jpg +n001826/0133_01.jpg +n001826/0153_01.jpg +n001826/0177_01.jpg +n001826/0275_01.jpg +n001826/0327_02.jpg +n001826/0327_01.jpg +n001826/0403_01.jpg +n001826/0418_02.jpg +n001826/0441_01.jpg +n001827/0218_01.jpg +n001827/0245_01.jpg +n001827/0279_02.jpg +n001827/0288_01.jpg +n001828/0031_01.jpg +n001828/0038_01.jpg +n001828/0050_02.jpg +n001828/0081_01.jpg +n001828/0130_01.jpg +n001828/0159_01.jpg +n001828/0309_01.jpg +n001828/0356_01.jpg +n001828/0387_01.jpg +n001829/0033_02.jpg +n001829/0085_02.jpg +n001829/0091_01.jpg +n001829/0123_01.jpg +n001829/0166_02.jpg +n001829/0277_02.jpg +n001831/0007_01.jpg +n001831/0129_01.jpg +n001831/0256_01.jpg +n001831/0288_01.jpg +n001832/0087_01.jpg +n001832/0158_01.jpg +n001832/0165_02.jpg +n001832/0179_01.jpg +n001832/0183_01.jpg +n001832/0250_02.jpg +n001832/0193_01.jpg +n001832/0290_01.jpg +n001832/0304_01.jpg +n001832/0309_01.jpg +n001833/0072_01.jpg +n001833/0105_01.jpg +n001833/0120_01.jpg +n001833/0145_01.jpg +n001833/0173_01.jpg +n001833/0222_01.jpg +n001833/0284_01.jpg +n001833/0292_02.jpg +n001833/0311_01.jpg +n001833/0331_01.jpg +n001833/0336_02.jpg +n001833/0432_02.jpg 
+n001833/0440_01.jpg +n001833/0449_01.jpg +n001833/0502_03.jpg +n001833/0520_02.jpg +n001834/0014_01.jpg +n001834/0031_01.jpg +n001834/0054_01.jpg +n001834/0112_01.jpg +n001834/0118_02.jpg +n001834/0148_02.jpg +n001834/0208_02.jpg +n001834/0231_01.jpg +n001834/0234_01.jpg +n001834/0240_02.jpg +n001834/0281_01.jpg +n001834/0283_01.jpg +n001834/0284_01.jpg +n001834/0453_01.jpg +n001834/0510_01.jpg +n001834/0517_02.jpg +n001835/0032_02.jpg +n001835/0043_01.jpg +n001835/0053_01.jpg +n001835/0212_01.jpg +n001835/0284_01.jpg +n001835/0380_03.jpg +n001835/0383_02.jpg +n001837/0046_01.jpg +n001837/0131_02.jpg +n001837/0154_01.jpg +n001837/0207_01.jpg +n001837/0367_01.jpg +n001837/0398_02.jpg +n001837/0556_02.jpg +n001837/0576_02.jpg +n001839/0080_02.jpg +n001839/0082_02.jpg +n001839/0089_01.jpg +n001839/0095_01.jpg +n001839/0207_01.jpg +n001839/0214_01.jpg +n001839/0287_01.jpg +n001839/0287_02.jpg +n001839/0301_01.jpg +n001839/0301_02.jpg +n001839/0309_01.jpg +n001839/0318_01.jpg +n001839/0557_02.jpg +n001839/0558_01.jpg +n001840/0161_01.jpg +n001841/0001_01.jpg +n001841/0003_01.jpg +n001841/0080_01.jpg +n001841/0087_01.jpg +n001841/0156_01.jpg +n001841/0159_01.jpg +n001841/0199_01.jpg +n001841/0222_01.jpg +n001841/0236_01.jpg +n001841/0298_01.jpg +n001841/0363_01.jpg +n001841/0426_01.jpg +n001842/0225_01.jpg +n001842/0315_01.jpg +n001843/0040_01.jpg +n001843/0041_01.jpg +n001843/0045_01.jpg +n001843/0047_01.jpg +n001843/0052_01.jpg +n001843/0137_02.jpg +n001843/0139_01.jpg +n001843/0238_01.jpg +n001843/0244_02.jpg +n001843/0318_01.jpg +n001843/0363_01.jpg +n001843/0406_01.jpg +n001843/0437_01.jpg +n001843/0445_01.jpg +n001843/0473_01.jpg +n001843/0484_01.jpg +n001843/0491_01.jpg +n001843/0562_03.jpg +n001843/0564_02.jpg +n001843/0581_01.jpg +n001844/0055_01.jpg +n001844/0077_03.jpg +n001844/0081_01.jpg +n001844/0229_01.jpg +n001844/0308_01.jpg +n001844/0368_03.jpg +n001844/0372_02.jpg +n001845/0001_02.jpg +n001845/0006_06.jpg +n001845/0105_01.jpg +n001845/0229_05.jpg 
+n001845/0258_01.jpg +n001845/0355_01.jpg +n001845/0372_01.jpg +n001845/0414_04.jpg +n001846/0348_05.jpg +n001846/0354_01.jpg +n001847/0211_02.jpg +n001847/0244_01.jpg +n001847/0268_01.jpg +n001848/0053_01.jpg +n001848/0063_02.jpg +n001848/0087_01.jpg +n001848/0084_03.jpg +n001848/0221_01.jpg +n001848/0300_01.jpg +n001848/0311_01.jpg +n001848/0326_01.jpg +n001849/0088_01.jpg +n001849/0093_01.jpg +n001849/0095_01.jpg +n001849/0096_01.jpg +n001849/0170_01.jpg +n001851/0080_02.jpg +n001851/0090_01.jpg +n001851/0170_01.jpg +n001851/0176_01.jpg +n001851/0247_01.jpg +n001851/0272_04.jpg +n001851/0278_01.jpg +n001851/0279_02.jpg +n001851/0317_02.jpg +n001851/0329_02.jpg +n001851/0341_02.jpg +n001851/0344_01.jpg +n001851/0347_02.jpg +n001851/0379_02.jpg +n001851/0392_03.jpg +n001851/0405_01.jpg +n001851/0418_02.jpg +n001851/0439_02.jpg +n001851/0522_03.jpg +n001852/0076_02.jpg +n001852/0079_03.jpg +n001852/0085_02.jpg +n001852/0120_02.jpg +n001852/0126_01.jpg +n001852/0176_01.jpg +n001852/0179_01.jpg +n001852/0194_01.jpg +n001852/0195_01.jpg +n001852/0197_01.jpg +n001852/0199_01.jpg +n001852/0220_01.jpg +n001852/0258_01.jpg +n001852/0260_02.jpg +n001852/0267_01.jpg +n001852/0275_01.jpg +n001852/0291_01.jpg +n001852/0303_02.jpg +n001852/0307_02.jpg +n001852/0340_01.jpg +n001852/0342_01.jpg +n001852/0375_02.jpg +n001853/0298_02.jpg +n001853/0305_01.jpg +n001854/0154_01.jpg +n001854/0243_01.jpg +n001854/0268_02.jpg +n001854/0291_01.jpg +n001855/0329_01.jpg +n001856/0100_01.jpg +n001856/0170_01.jpg +n001856/0225_01.jpg +n001856/0230_04.jpg +n001856/0231_01.jpg +n001856/0232_01.jpg +n001856/0240_02.jpg +n001856/0350_01.jpg +n001858/0028_02.jpg +n001858/0036_01.jpg +n001858/0039_01.jpg +n001858/0062_03.jpg +n001858/0072_02.jpg +n001858/0091_01.jpg +n001858/0095_01.jpg +n001858/0102_03.jpg +n001858/0119_01.jpg +n001858/0140_01.jpg +n001858/0183_03.jpg +n001858/0199_01.jpg +n001858/0213_01.jpg +n001858/0213_02.jpg +n001858/0276_02.jpg +n001858/0352_04.jpg +n001858/0465_03.jpg 
+n001858/0655_01.jpg +n001858/0727_01.jpg +n001858/1054_01.jpg +n001858/1055_01.jpg +n001858/1074_02.jpg +n001858/1096_01.jpg +n001859/0484_01.jpg +n001860/0190_01.jpg +n001860/0197_01.jpg +n001860/0231_01.jpg +n001860/0238_01.jpg +n001860/0295_01.jpg +n001860/0301_01.jpg +n001860/0311_01.jpg +n001860/0352_01.jpg +n001860/0368_01.jpg +n001860/0370_01.jpg +n001860/0373_03.jpg +n001860/0394_01.jpg +n001860/0396_01.jpg +n001860/0398_01.jpg +n001860/0410_02.jpg +n001860/0505_01.jpg +n001860/0528_01.jpg +n001861/0067_01.jpg +n001861/0111_01.jpg +n001861/0185_01.jpg +n001861/0297_01.jpg +n001861/0316_02.jpg +n001861/0356_01.jpg +n001862/0211_01.jpg +n001863/0087_02.jpg +n001863/0131_02.jpg +n001863/0152_01.jpg +n001863/0214_01.jpg +n001863/0214_02.jpg +n001863/0217_02.jpg +n001863/0223_01.jpg +n001863/0478_01.jpg +n001864/0008_01.jpg +n001864/0037_01.jpg +n001864/0329_01.jpg +n001865/0028_02.jpg +n001865/0259_02.jpg +n001865/0332_02.jpg +n001865/0484_01.jpg +n001866/0151_01.jpg +n001866/0460_01.jpg +n001867/0090_01.jpg +n001867/0140_03.jpg +n001867/0148_01.jpg +n001867/0178_01.jpg +n001867/0179_01.jpg +n001867/0193_01.jpg +n001867/0205_02.jpg +n001867/0233_01.jpg +n001867/0249_02.jpg +n001868/0012_01.jpg +n001868/0094_03.jpg +n001868/0127_01.jpg +n001868/0139_02.jpg +n001868/0140_02.jpg +n001868/0141_04.jpg +n001868/0156_01.jpg +n001868/0164_02.jpg +n001868/0189_03.jpg +n001868/0203_06.jpg +n001868/0204_02.jpg +n001868/0233_01.jpg +n001868/0250_02.jpg +n001868/0275_01.jpg +n001868/0332_01.jpg +n001869/0101_02.jpg +n001869/0203_01.jpg +n001869/0222_02.jpg +n001870/0057_01.jpg +n001870/0086_01.jpg +n001870/0190_01.jpg +n001870/0201_01.jpg +n001870/0211_01.jpg +n001870/0216_01.jpg +n001870/0270_01.jpg +n001870/0343_02.jpg +n001871/0097_02.jpg +n001871/0159_01.jpg +n001871/0195_01.jpg +n001871/0270_01.jpg +n001871/0379_01.jpg +n001871/0447_01.jpg +n001871/0459_01.jpg +n001871/0460_01.jpg +n001872/0044_02.jpg +n001872/0223_01.jpg +n001872/0225_01.jpg +n001872/0231_01.jpg 
+n001872/0237_01.jpg +n001872/0244_01.jpg +n001872/0254_01.jpg +n001872/0411_01.jpg +n001873/0184_05.jpg +n001873/0362_01.jpg +n001873/0435_02.jpg +n001874/0083_01.jpg +n001874/0078_01.jpg +n001875/0207_01.jpg +n001875/0357_02.jpg +n001876/0003_02.jpg +n001876/0061_03.jpg +n001876/0136_04.jpg +n001876/0307_01.jpg +n001876/0342_01.jpg +n001877/0165_02.jpg +n001879/0048_01.jpg +n001879/0057_01.jpg +n001879/0059_01.jpg +n001879/0089_01.jpg +n001879/0093_02.jpg +n001879/0103_01.jpg +n001879/0276_02.jpg +n001879/0292_01.jpg +n001879/0294_01.jpg +n001879/0308_01.jpg +n001879/0316_01.jpg +n001879/0326_01.jpg +n001879/0350_01.jpg +n001879/0367_01.jpg +n001880/0050_01.jpg +n001880/0148_02.jpg +n001880/0157_02.jpg +n001880/0332_02.jpg +n001880/0348_01.jpg +n001880/0410_01.jpg +n001880/0411_01.jpg +n001880/0436_02.jpg +n001881/0018_01.jpg +n001881/0023_02.jpg +n001881/0024_01.jpg +n001881/0051_01.jpg +n001881/0252_02.jpg +n001881/0285_03.jpg +n001881/0447_02.jpg +n001881/0468_01.jpg +n001881/0486_01.jpg +n001881/0511_01.jpg +n001882/0238_02.jpg +n001882/0241_01.jpg +n001882/0247_01.jpg +n001882/0287_01.jpg +n001882/0290_01.jpg +n001882/0298_01.jpg +n001882/0303_01.jpg +n001882/0304_01.jpg +n001882/0322_02.jpg +n001882/0358_01.jpg +n001882/0403_02.jpg +n001882/0412_03.jpg +n001883/0030_01.jpg +n001883/0044_01.jpg +n001883/0112_03.jpg +n001883/0149_01.jpg +n001883/0206_01.jpg +n001884/0084_01.jpg +n001884/0236_01.jpg +n001884/0247_01.jpg +n001884/0287_01.jpg +n001884/0365_01.jpg +n001885/0022_02.jpg +n001885/0083_01.jpg +n001885/0118_02.jpg +n001885/0121_03.jpg +n001885/0126_01.jpg +n001885/0143_02.jpg +n001885/0152_01.jpg +n001885/0167_01.jpg +n001885/0233_01.jpg +n001885/0272_01.jpg +n001885/0277_02.jpg +n001885/0279_04.jpg +n001885/0288_02.jpg +n001885/0296_01.jpg +n001886/0174_01.jpg +n001886/0192_01.jpg +n001886/0204_03.jpg +n001886/0206_03.jpg +n001886/0210_01.jpg +n001886/0218_01.jpg +n001886/0233_01.jpg +n001887/0032_01.jpg +n001887/0040_01.jpg +n001887/0062_01.jpg 
+n001887/0124_01.jpg +n001887/0140_01.jpg +n001887/0151_01.jpg +n001887/0235_01.jpg +n001887/0320_01.jpg +n001887/0421_03.jpg +n001888/0086_02.jpg +n001888/0423_03.jpg +n001889/0001_02.jpg +n001889/0022_01.jpg +n001889/0023_01.jpg +n001889/0026_01.jpg +n001889/0088_01.jpg +n001889/0229_01.jpg +n001889/0244_01.jpg +n001889/0284_01.jpg +n001889/0309_01.jpg +n001889/0325_01.jpg +n001890/0278_02.jpg +n001890/0364_01.jpg +n001890/0456_02.jpg +n001891/0143_01.jpg +n001891/0247_01.jpg +n001891/0264_01.jpg +n001891/0299_01.jpg +n001891/0478_01.jpg +n001892/0010_02.jpg +n001892/0040_01.jpg +n001892/0100_01.jpg +n001893/0060_01.jpg +n001893/0059_01.jpg +n001893/0104_01.jpg +n001893/0185_01.jpg +n001894/0049_01.jpg +n001894/0052_01.jpg +n001894/0061_01.jpg +n001894/0091_01.jpg +n001894/0094_02.jpg +n001894/0115_01.jpg +n001894/0117_02.jpg +n001894/0124_01.jpg +n001894/0218_01.jpg +n001894/0231_01.jpg +n001894/0253_02.jpg +n001894/0344_01.jpg +n001894/0360_02.jpg +n001894/0411_01.jpg +n001894/0407_01.jpg +n001895/0009_02.jpg +n001895/0098_01.jpg +n001895/0105_01.jpg +n001895/0134_01.jpg +n001895/0142_01.jpg +n001895/0147_01.jpg +n001895/0189_01.jpg +n001895/0195_02.jpg +n001895/0196_02.jpg +n001895/0205_02.jpg +n001895/0207_01.jpg +n001895/0253_02.jpg +n001895/0257_01.jpg +n001895/0260_03.jpg +n001895/0310_02.jpg +n001895/0311_02.jpg +n001896/0028_01.jpg +n001896/0076_01.jpg +n001896/0218_01.jpg +n001896/0226_01.jpg +n001896/0242_01.jpg +n001896/0247_02.jpg +n001896/0256_02.jpg +n001896/0257_01.jpg +n001896/0261_02.jpg +n001897/0023_01.jpg +n001897/0036_01.jpg +n001897/0048_01.jpg +n001897/0146_01.jpg +n001897/0185_02.jpg +n001897/0219_01.jpg +n001899/0139_02.jpg +n001899/0198_01.jpg +n001899/0232_01.jpg +n001899/0523_02.jpg +n001900/0102_01.jpg +n001900/0289_02.jpg +n001900/0327_01.jpg +n001900/0339_01.jpg +n001900/0345_02.jpg +n001900/0369_01.jpg +n001901/0030_01.jpg +n001901/0093_03.jpg +n001901/0100_01.jpg +n001901/0101_02.jpg +n001901/0116_01.jpg +n001901/0131_01.jpg 
+n001901/0162_01.jpg +n001901/0225_01.jpg +n001901/0239_02.jpg +n001901/0254_01.jpg +n001901/0275_01.jpg +n001901/0320_01.jpg +n001901/0322_03.jpg +n001902/0072_01.jpg +n001902/0078_03.jpg +n001903/0368_02.jpg +n001903/0399_01.jpg +n001903/0432_01.jpg +n001903/0455_01.jpg +n001904/0148_01.jpg +n001904/0319_01.jpg +n001904/0321_01.jpg +n001904/0350_01.jpg +n001905/0114_02.jpg +n001905/0116_01.jpg +n001905/0155_02.jpg +n001905/0181_01.jpg +n001905/0267_02.jpg +n001905/0286_04.jpg +n001905/0334_02.jpg +n001905/0350_01.jpg +n001905/0366_02.jpg +n001905/0367_01.jpg +n001905/0383_01.jpg +n001905/0434_01.jpg +n001905/0442_02.jpg +n001905/0518_01.jpg +n001906/0007_02.jpg +n001906/0075_01.jpg +n001906/0076_03.jpg +n001906/0079_01.jpg +n001906/0104_01.jpg +n001906/0113_02.jpg +n001906/0152_02.jpg +n001906/0186_01.jpg +n001906/0314_02.jpg +n001906/0330_01.jpg +n001906/0377_01.jpg +n001907/0056_01.jpg +n001907/0093_01.jpg +n001907/0092_02.jpg +n001907/0100_01.jpg +n001907/0103_01.jpg +n001907/0139_02.jpg +n001907/0175_02.jpg +n001907/0253_01.jpg +n001907/0302_01.jpg +n001907/0348_01.jpg +n001907/0398_01.jpg +n001908/0005_01.jpg +n001908/0044_01.jpg +n001908/0266_02.jpg +n001908/0367_03.jpg +n001909/0027_02.jpg +n001909/0044_01.jpg +n001909/0201_02.jpg +n001909/0222_01.jpg +n001909/0274_01.jpg +n001909/0309_01.jpg +n001909/0316_01.jpg +n001909/0327_01.jpg +n001909/0361_01.jpg +n001909/0362_01.jpg +n001910/0105_01.jpg +n001910/0106_01.jpg +n001910/0141_01.jpg +n001910/0141_02.jpg +n001910/0146_01.jpg +n001910/0154_02.jpg +n001910/0206_02.jpg +n001910/0228_02.jpg +n001910/0282_01.jpg +n001910/0291_01.jpg +n001911/0007_02.jpg +n001911/0090_02.jpg +n001911/0206_01.jpg +n001911/0291_01.jpg +n001911/0332_01.jpg +n001911/0339_02.jpg +n001911/0403_01.jpg +n001911/0450_01.jpg +n001911/0473_01.jpg +n001912/0048_03.jpg +n001912/0051_01.jpg +n001912/0116_02.jpg +n001912/0130_01.jpg +n001912/0168_01.jpg +n001912/0278_02.jpg +n001912/0316_02.jpg +n001913/0011_02.jpg +n001913/0019_01.jpg 
+n001913/0025_01.jpg +n001913/0114_01.jpg +n001914/0158_02.jpg +n001914/0243_01.jpg +n001914/0376_02.jpg +n001914/0402_01.jpg +n001915/0029_02.jpg +n001915/0049_02.jpg +n001915/0081_01.jpg +n001915/0113_04.jpg +n001915/0127_01.jpg +n001915/0184_01.jpg +n001915/0206_02.jpg +n001915/0207_01.jpg +n001915/0236_01.jpg +n001915/0268_01.jpg +n001915/0271_01.jpg +n001915/0273_02.jpg +n001915/0343_02.jpg +n001916/0010_01.jpg +n001916/0210_01.jpg +n001917/0046_01.jpg +n001917/0062_01.jpg +n001917/0067_01.jpg +n001917/0084_01.jpg +n001917/0138_03.jpg +n001917/0145_03.jpg +n001917/0162_01.jpg +n001917/0210_01.jpg +n001917/0246_01.jpg +n001917/0258_01.jpg +n001917/0317_01.jpg +n001917/0362_01.jpg +n001917/0370_01.jpg +n001917/0557_01.jpg +n001917/0624_03.jpg +n001918/0104_01.jpg +n001918/0107_01.jpg +n001918/0217_03.jpg +n001918/0240_01.jpg +n001918/0295_01.jpg +n001919/0174_02.jpg +n001919/0241_01.jpg +n001919/0284_02.jpg +n001919/0407_01.jpg +n001920/0059_02.jpg +n001920/0067_01.jpg +n001920/0121_01.jpg +n001920/0162_03.jpg +n001920/0171_02.jpg +n001920/0189_01.jpg +n001920/0210_02.jpg +n001920/0329_01.jpg +n001920/0332_01.jpg +n001920/0358_01.jpg +n001920/0368_01.jpg +n001920/0374_01.jpg +n001920/0425_02.jpg +n001922/0025_01.jpg +n001922/0064_01.jpg +n001922/0107_02.jpg +n001922/0111_01.jpg +n001922/0110_01.jpg +n001922/0128_01.jpg +n001922/0192_01.jpg +n001922/0317_01.jpg +n001922/0327_01.jpg +n001922/0364_02.jpg +n001922/0392_02.jpg +n001924/0002_01.jpg +n001924/0058_01.jpg +n001924/0191_01.jpg +n001924/0199_01.jpg +n001924/0223_01.jpg +n001924/0226_01.jpg +n001924/0254_01.jpg +n001924/0276_02.jpg +n001924/0320_01.jpg +n001925/0068_01.jpg +n001926/0019_01.jpg +n001926/0040_02.jpg +n001926/0069_01.jpg +n001926/0070_02.jpg +n001926/0080_01.jpg +n001926/0139_01.jpg +n001926/0168_01.jpg +n001926/0192_01.jpg +n001926/0203_01.jpg +n001926/0276_01.jpg +n001926/0304_01.jpg +n001926/0347_01.jpg +n001926/0359_01.jpg +n001926/0366_01.jpg +n001928/0149_01.jpg +n001930/0039_01.jpg 
+n001930/0057_06.jpg +n001930/0073_03.jpg +n001930/0104_02.jpg +n001930/0193_01.jpg +n001930/0215_01.jpg +n001930/0408_01.jpg +n001930/0440_03.jpg +n001931/0112_01.jpg +n001931/0114_01.jpg +n001931/0187_02.jpg +n001933/0134_01.jpg +n001936/0006_01.jpg +n001936/0052_01.jpg +n001936/0107_01.jpg +n001936/0107_02.jpg +n001936/0127_02.jpg +n001936/0133_01.jpg +n001936/0159_01.jpg +n001936/0160_03.jpg +n001936/0228_01.jpg +n001936/0231_02.jpg +n001936/0240_01.jpg +n001936/0241_01.jpg +n001936/0264_02.jpg +n001936/0318_01.jpg +n001936/0329_01.jpg +n001936/0338_03.jpg +n001936/0351_02.jpg +n001936/0356_02.jpg +n001936/0360_01.jpg +n001936/0399_01.jpg +n001936/0414_01.jpg +n001936/0432_01.jpg +n001936/0466_03.jpg +n001937/0012_02.jpg +n001937/0111_02.jpg +n001937/0115_02.jpg +n001937/0275_01.jpg +n001937/0293_02.jpg +n001937/0332_02.jpg +n001937/0361_01.jpg +n001937/0364_01.jpg +n001937/0410_01.jpg +n001937/0487_01.jpg +n001938/0009_01.jpg +n001938/0075_01.jpg +n001938/0196_01.jpg +n001938/0307_01.jpg +n001938/0450_01.jpg +n001939/0023_01.jpg +n001939/0177_01.jpg +n001939/0219_02.jpg +n001939/0250_01.jpg +n001939/0248_01.jpg +n001939/0327_02.jpg +n001939/0347_01.jpg +n001939/0370_01.jpg +n001939/0407_01.jpg +n001939/0421_01.jpg +n001939/0439_02.jpg +n001940/0067_01.jpg +n001940/0150_02.jpg +n001940/0154_02.jpg +n001940/0215_02.jpg +n001940/0257_01.jpg +n001940/0274_01.jpg +n001940/0286_02.jpg +n001940/0300_01.jpg +n001940/0316_01.jpg +n001940/0358_01.jpg +n001940/0368_02.jpg +n001940/0404_02.jpg +n001940/0416_02.jpg +n001941/0042_01.jpg +n001942/0113_01.jpg +n001942/0123_02.jpg +n001942/0165_02.jpg +n001943/0026_01.jpg +n001943/0240_01.jpg +n001943/0530_02.jpg +n001943/0822_02.jpg +n001944/0110_01.jpg +n001944/0124_01.jpg +n001944/0166_03.jpg +n001944/0192_01.jpg +n001944/0194_02.jpg +n001944/0221_01.jpg +n001944/0228_01.jpg +n001944/0233_02.jpg +n001944/0239_01.jpg +n001944/0287_02.jpg +n001944/0327_04.jpg +n001944/0338_02.jpg +n001945/0297_02.jpg +n001945/0425_01.jpg 
+n001946/0051_01.jpg +n001946/0116_01.jpg +n001946/0117_01.jpg +n001946/0121_02.jpg +n001946/0133_01.jpg +n001946/0158_02.jpg +n001946/0244_02.jpg +n001946/0304_02.jpg +n001947/0194_01.jpg +n001947/0311_01.jpg +n001947/0356_01.jpg +n001948/0086_01.jpg +n001948/0126_01.jpg +n001948/0162_02.jpg +n001948/0177_01.jpg +n001948/0211_05.jpg +n001948/0221_02.jpg +n001948/0230_01.jpg +n001948/0294_01.jpg +n001949/0165_01.jpg +n001949/0289_01.jpg +n001949/0418_01.jpg +n001950/0014_01.jpg +n001950/0051_02.jpg +n001950/0086_01.jpg +n001950/0104_01.jpg +n001950/0267_06.jpg +n001950/0331_01.jpg +n001950/0398_01.jpg +n001951/0214_01.jpg +n001951/0231_01.jpg +n001951/0261_01.jpg +n001951/0318_01.jpg +n001951/0309_01.jpg +n001952/0063_01.jpg +n001952/0121_01.jpg +n001953/0213_01.jpg +n001953/0226_03.jpg +n001953/0226_04.jpg +n001953/0262_01.jpg +n001954/0034_01.jpg +n001954/0059_02.jpg +n001954/0296_01.jpg +n001954/0364_01.jpg +n001955/0007_01.jpg +n001955/0030_01.jpg +n001955/0036_01.jpg +n001955/0068_01.jpg +n001955/0076_03.jpg +n001955/0083_01.jpg +n001955/0089_01.jpg +n001955/0090_01.jpg +n001955/0105_01.jpg +n001955/0121_01.jpg +n001955/0144_01.jpg +n001955/0195_02.jpg +n001955/0225_01.jpg +n001955/0227_01.jpg +n001955/0301_01.jpg +n001955/0336_04.jpg +n001955/0352_01.jpg +n001955/0356_02.jpg +n001955/0386_01.jpg +n001957/0229_01.jpg +n001957/0319_02.jpg +n001958/0029_01.jpg +n001958/0107_02.jpg +n001958/0125_02.jpg +n001958/0127_01.jpg +n001958/0140_02.jpg +n001958/0152_01.jpg +n001958/0221_01.jpg +n001958/0236_01.jpg +n001958/0237_01.jpg +n001958/0242_05.jpg +n001958/0244_02.jpg +n001958/0300_02.jpg +n001958/0350_01.jpg +n001958/0360_01.jpg +n001959/0008_01.jpg +n001959/0084_03.jpg +n001959/0127_01.jpg +n001959/0144_01.jpg +n001959/0225_01.jpg +n001959/0239_01.jpg +n001959/0288_02.jpg +n001959/0301_04.jpg +n001959/0302_02.jpg +n001959/0308_01.jpg +n001959/0468_01.jpg +n001960/0024_01.jpg +n001960/0083_01.jpg +n001960/0093_01.jpg +n001960/0122_02.jpg +n001960/0123_01.jpg 
+n001960/0248_01.jpg +n001960/0367_01.jpg +n001960/0372_02.jpg +n001960/0381_01.jpg +n001960/0383_01.jpg +n001960/0424_02.jpg +n001960/0465_01.jpg +n001961/0069_01.jpg +n001961/0104_01.jpg +n001961/0127_01.jpg +n001961/0131_02.jpg +n001961/0237_01.jpg +n001961/0363_01.jpg +n001961/0432_02.jpg +n001961/0479_02.jpg +n001961/0645_01.jpg +n001961/0650_02.jpg +n001962/0086_01.jpg +n001962/0195_01.jpg +n001962/0221_02.jpg +n001963/0278_02.jpg +n001963/0303_02.jpg +n001963/0374_01.jpg +n001963/0401_01.jpg +n001964/0004_01.jpg +n001964/0027_02.jpg +n001964/0049_02.jpg +n001964/0054_01.jpg +n001964/0106_01.jpg +n001964/0124_01.jpg +n001964/0141_01.jpg +n001964/0173_01.jpg +n001964/0182_01.jpg +n001964/0251_01.jpg +n001964/0269_01.jpg +n001964/0270_02.jpg +n001964/0296_02.jpg +n001965/0303_01.jpg +n001966/0042_05.jpg +n001966/0159_01.jpg +n001966/0292_02.jpg +n001966/0439_02.jpg +n001966/0480_02.jpg +n001967/0068_01.jpg +n001968/0001_01.jpg +n001968/0012_06.jpg +n001968/0024_01.jpg +n001968/0030_07.jpg +n001968/0083_01.jpg +n001968/0095_02.jpg +n001968/0142_01.jpg +n001968/0172_05.jpg +n001968/0293_01.jpg +n001968/0304_01.jpg +n001968/0356_03.jpg +n001970/0006_01.jpg +n001970/0056_02.jpg +n001970/0134_01.jpg +n001970/0155_01.jpg +n001970/0170_01.jpg +n001970/0173_01.jpg +n001970/0177_01.jpg +n001970/0184_01.jpg +n001970/0219_01.jpg +n001970/0245_01.jpg +n001970/0296_02.jpg +n001970/0305_01.jpg +n001970/0320_01.jpg +n001970/0332_01.jpg +n001970/0341_01.jpg +n001970/0348_01.jpg +n001970/0369_01.jpg +n001970/0372_02.jpg +n001970/0376_01.jpg +n001971/0249_01.jpg +n001972/0075_01.jpg +n001972/0101_01.jpg +n001972/0103_02.jpg +n001972/0118_02.jpg +n001972/0165_03.jpg +n001972/0269_01.jpg +n001972/0316_02.jpg +n001972/0409_01.jpg +n001973/0056_01.jpg +n001973/0110_02.jpg +n001973/0144_01.jpg +n001973/0184_01.jpg +n001973/0207_01.jpg +n001973/0223_02.jpg +n001973/0621_01.jpg +n001974/0061_05.jpg +n001974/0106_01.jpg +n001974/0123_01.jpg +n001974/0209_01.jpg +n001974/0316_01.jpg 
+n001974/0317_01.jpg +n001974/0428_01.jpg +n001974/0461_01.jpg +n001974/0462_01.jpg +n001974/0504_01.jpg +n001974/0529_01.jpg +n001975/0313_01.jpg +n001975/0445_01.jpg +n001975/0454_02.jpg +n001978/0006_01.jpg +n001978/0008_01.jpg +n001978/0022_01.jpg +n001978/0030_01.jpg +n001978/0037_01.jpg +n001978/0039_05.jpg +n001978/0045_01.jpg +n001978/0052_01.jpg +n001978/0055_01.jpg +n001978/0100_01.jpg +n001978/0116_02.jpg +n001978/0160_01.jpg +n001978/0230_01.jpg +n001979/0079_03.jpg +n001979/0084_02.jpg +n001979/0100_02.jpg +n001979/0225_01.jpg +n001979/0454_01.jpg +n001979/0517_01.jpg +n001980/0049_02.jpg +n001980/0063_02.jpg +n001980/0094_01.jpg +n001980/0105_01.jpg +n001980/0119_01.jpg +n001980/0126_01.jpg +n001980/0167_01.jpg +n001980/0168_01.jpg +n001980/0181_02.jpg +n001980/0211_01.jpg +n001980/0337_01.jpg +n001980/0374_01.jpg +n001980/0410_01.jpg +n001980/0425_02.jpg +n001981/0293_03.jpg +n001982/0096_01.jpg +n001982/0097_01.jpg +n001982/0099_02.jpg +n001982/0129_01.jpg +n001982/0240_02.jpg +n001982/0320_02.jpg +n001983/0032_01.jpg +n001983/0204_01.jpg +n001984/0018_01.jpg +n001984/0076_03.jpg +n001984/0168_01.jpg +n001984/0196_01.jpg +n001985/0016_01.jpg +n001985/0069_01.jpg +n001985/0088_01.jpg +n001985/0094_01.jpg +n001985/0116_01.jpg +n001985/0117_01.jpg +n001985/0178_01.jpg +n001985/0194_01.jpg +n001985/0260_02.jpg +n001985/0283_02.jpg +n001985/0294_01.jpg +n001985/0322_03.jpg +n001985/0328_01.jpg +n001985/0340_02.jpg +n001986/0007_01.jpg +n001986/0046_01.jpg +n001986/0093_01.jpg +n001986/0119_01.jpg +n001986/0131_01.jpg +n001986/0147_02.jpg +n001986/0161_01.jpg +n001986/0167_01.jpg +n001986/0200_03.jpg +n001986/0228_01.jpg +n001986/0233_02.jpg +n001986/0254_01.jpg +n001986/0254_03.jpg +n001986/0296_02.jpg +n001986/0325_01.jpg +n001986/0431_01.jpg +n001987/0160_01.jpg +n001987/0182_01.jpg +n001987/0380_01.jpg +n001988/0053_01.jpg +n001988/0056_01.jpg +n001988/0087_01.jpg +n001988/0181_01.jpg +n001988/0182_01.jpg +n001988/0194_01.jpg +n001988/0249_03.jpg 
+n001988/0297_02.jpg +n001989/0074_02.jpg +n001989/0101_02.jpg +n001989/0135_01.jpg +n001989/0216_01.jpg +n001989/0241_01.jpg +n001989/0353_01.jpg +n001990/0144_02.jpg +n001991/0081_01.jpg +n001991/0183_01.jpg +n001991/0435_01.jpg +n001992/0007_02.jpg +n001992/0047_01.jpg +n001992/0117_01.jpg +n001992/0223_01.jpg +n001992/0233_01.jpg +n001992/0259_01.jpg +n001992/0374_01.jpg +n001993/0092_02.jpg +n001993/0112_01.jpg +n001993/0180_01.jpg +n001993/0187_01.jpg +n001993/0236_01.jpg +n001993/0239_01.jpg +n001993/0301_01.jpg +n001994/0013_02.jpg +n001994/0299_01.jpg +n001995/0136_01.jpg +n001995/0173_01.jpg +n001995/0184_02.jpg +n001995/0188_01.jpg +n001995/0225_01.jpg +n001995/0230_02.jpg +n001995/0638_03.jpg +n001995/0645_08.jpg +n001996/0022_02.jpg +n001996/0121_02.jpg +n001996/0193_01.jpg +n001996/0209_01.jpg +n001996/0297_01.jpg +n001996/0315_01.jpg +n001996/0328_02.jpg +n001996/0330_01.jpg +n001996/0463_01.jpg +n001998/0020_01.jpg +n001998/0091_01.jpg +n001998/0093_01.jpg +n001998/0128_02.jpg +n001998/0200_02.jpg +n001998/0639_01.jpg +n001998/0813_01.jpg +n001999/0143_01.jpg +n001999/0234_01.jpg +n001999/0255_01.jpg +n002000/0058_02.jpg +n002000/0130_01.jpg +n002000/0135_01.jpg +n002000/0160_02.jpg n002000/0130_01.jpg n002000/0135_01.jpg n002000/0160_02.jpg