Uploading all files
This view is limited to 50 files because the commit contains too many changes.
- .dockerignore +6 -0
- .gitattributes +40 -35
- .gitignore +10 -0
- Dockerfile +33 -0
- LICENSE +21 -0
- README.md +199 -3
- assets/compare.png +3 -0
- assets/pullfig.png +3 -0
- assets/result_linemod.png +3 -0
- assets/result_ycb.png +3 -0
- build.sh +4 -0
- datasets/linemod/dataset.py +291 -0
- datasets/linemod/dataset_config/models_info.yml +15 -0
- datasets/ycb/dataset.py +289 -0
- datasets/ycb/dataset_config/classes.txt +21 -0
- datasets/ycb/dataset_config/test_data_list.txt +2949 -0
- datasets/ycb/dataset_config/train_data_list.txt +0 -0
- download.sh +29 -0
- experiments/eval_result/linemod/.gitignore +1 -0
- experiments/eval_result/ycb/Densefusion_iterative_result/.gitignore +1 -0
- experiments/eval_result/ycb/Densefusion_wo_refine_result/.gitignore +1 -0
- experiments/logs/linemod/.gitignore +1 -0
- experiments/logs/ycb/.gitignore +1 -0
- experiments/scripts/eval_linemod.sh +11 -0
- experiments/scripts/eval_ycb.sh +20 -0
- experiments/scripts/train_linemod.sh +10 -0
- experiments/scripts/train_ycb.sh +10 -0
- lib/extractors.py +145 -0
- lib/knn/Makefile +39 -0
- lib/knn/__init__.py +45 -0
- lib/knn/build/knn_cuda_kernel.so +0 -0
- lib/knn/build_ffi.py +25 -0
- lib/knn/knn_pytorch/__init__.py +15 -0
- lib/knn/knn_pytorch/__pycache__/__init__.cpython-35.pyc +0 -0
- lib/knn/knn_pytorch/__pycache__/__init__.cpython-36.pyc +0 -0
- lib/knn/knn_pytorch/_knn_pytorch.so +3 -0
- lib/knn/src/knn_cuda_kernel.cu +259 -0
- lib/knn/src/knn_cuda_kernel.h +22 -0
- lib/knn/src/knn_pytorch.c +48 -0
- lib/knn/src/knn_pytorch.h +2 -0
- lib/loss.py +83 -0
- lib/loss_refiner.py +76 -0
- lib/network.py +206 -0
- lib/pspnet.py +77 -0
- lib/transformations.py +1935 -0
- lib/utils.py +15 -0
- replace_ycb_toolbox/evaluate_poses_keyframe.m +217 -0
- replace_ycb_toolbox/plot_accuracy_keyframe.m +170 -0
- run.sh +29 -0
- tools/_init_paths.py +3 -0
.dockerignore
ADDED
@@ -0,0 +1,6 @@
*.zip
.git
datasets/linemod/Linemod_preprocessed
datasets/ycb/YCB_Video_Dataset
*.pth
YCB_Video_toolbox/*
.gitattributes
CHANGED
@@ -1,35 +1,40 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+assets/compare.png filter=lfs diff=lfs merge=lfs -text
+assets/pullfig.png filter=lfs diff=lfs merge=lfs -text
+assets/result_linemod.png filter=lfs diff=lfs merge=lfs -text
+assets/result_ycb.png filter=lfs diff=lfs merge=lfs -text
+lib/knn/knn_pytorch/_knn_pytorch.so filter=lfs diff=lfs merge=lfs -text
.gitignore
ADDED
@@ -0,0 +1,10 @@
datasets/linemod/Linemod_preprocessed
datasets/ycb/YCB_Video_Dataset
*.zip
*__pycache__
experiments/logs/*
*.pyc
lib/knn/knn_pytorch/__pycache__/*
__init__.*
*.pth
YCB_Video_toolbox
Dockerfile
ADDED
@@ -0,0 +1,33 @@
FROM nvidia/cudagl:9.0-devel-ubuntu16.04

ARG DEBIAN_FRONTEND=noninteractive

# Essentials: developer tools, build tools, OpenBLAS
RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-utils git curl vim unzip openssh-client wget \
    build-essential cmake \
    libopenblas-dev \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev

# Python 3.5
RUN apt-get update && apt-get install -y --no-install-recommends python3.5 python3.5-dev python3-pip python3-tk && \
    pip3 install --no-cache-dir --upgrade pip setuptools && \
    echo "alias python='python3'" >> /root/.bash_aliases && \
    echo "alias pip='pip3'" >> /root/.bash_aliases

# Science libraries and other common packages
RUN pip3 --no-cache-dir install \
    numpy scipy pyyaml cffi matplotlib Cython requests opencv-python "pillow<7"

# PyTorch 0.4.1 (CUDA 9.0 build) and torchvision
RUN pip3 install https://download.pytorch.org/whl/cu90/torch-0.4.1-cp35-cp35m-linux_x86_64.whl && \
    pip3 install torchvision==0.2.2.post3

# Expose port for TensorBoard
EXPOSE 6006

# cd to home on login
RUN echo "cd /root/dense_fusion" >> /root/.bashrc
LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019 Jeremy Wang

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md
CHANGED
@@ -1,3 +1,199 @@
# DenseFusion

<p align="center">
  <img src ="assets/pullfig.png" width="1000" />
</p>

## News
We have released the code and arXiv preprint for our new project [6-PACK](https://sites.google.com/view/6packtracking), which builds on this work and is used for category-level 6D pose tracking.

## Table of Content
- [Overview](#overview)
- [Requirements](#requirements)
- [Code Structure](#code-structure)
- [Datasets](#datasets)
- [Training](#training)
- [Evaluation](#evaluation)
  - [Evaluation on YCB_Video Dataset](#evaluation-on-ycb_video-dataset)
  - [Evaluation on LineMOD Dataset](#evaluation-on-linemod-dataset)
- [Results](#results)
- [Trained Checkpoints](#trained-checkpoints)
- [Tips for your own dataset](#tips-for-your-own-dataset)
- [Citations](#citations)
- [License](#license)

## Overview

This repository is the implementation code of the paper "DenseFusion: 6D Object Pose Estimation by Iterative Dense Fusion" ([arXiv](https://arxiv.org/abs/1901.04780), [Project](https://sites.google.com/view/densefusion), [Video](https://www.youtube.com/watch?v=SsE5-FuK5jo)) by Wang et al. at the [Stanford Vision and Learning Lab](http://svl.stanford.edu/) and the [Stanford People, AI & Robots Group](http://pair.stanford.edu/). The model takes an RGB-D image as input and predicts the 6D pose of each object in the frame. The network is implemented with [PyTorch](https://pytorch.org/) and the rest of the framework is in Python. Since this project focuses on the 6D pose estimation process, we do not restrict the choice of segmentation model; you can use your preferred semantic-segmentation or instance-segmentation method. In this repo, we provide the full implementation of the DenseFusion model, the Iterative Refinement model, and a vanilla SegNet semantic-segmentation model used in our real-robot grasping experiment. The ROS code of the real-robot grasping experiment is not included.

## Requirements

* Python 2.7/3.5/3.6 (if you want to run this repo with Python 2.7, please rebuild `lib/knn/` against PyTorch 0.4.1)
* [PyTorch 0.4.1](https://pytorch.org/) ([PyTorch 1.0 branch](https://github.com/j96w/DenseFusion/tree/Pytorch-1.0))
* PIL
* scipy
* numpy
* pyyaml
* logging
* matplotlib
* CUDA 7.5/8.0/9.0 (required; CPU-only training is extremely slow because of the pixel-wise nearest-neighbour loss used for symmetric objects)

## Code Structure
* **datasets**
  * **datasets/ycb**
    * **datasets/ycb/dataset.py**: Data loader for the YCB_Video dataset.
    * **datasets/ycb/dataset_config**
      * **datasets/ycb/dataset_config/classes.txt**: Object list of the YCB_Video dataset.
      * **datasets/ycb/dataset_config/train_data_list.txt**: Training set of the YCB_Video dataset.
      * **datasets/ycb/dataset_config/test_data_list.txt**: Testing set of the YCB_Video dataset.
  * **datasets/linemod**
    * **datasets/linemod/dataset.py**: Data loader for the LineMOD dataset.
    * **datasets/linemod/dataset_config**
      * **datasets/linemod/dataset_config/models_info.yml**: Object model info of the LineMOD dataset.
* **replace_ycb_toolbox**: Replacement code for evaluation with the [YCB_Video_toolbox](https://github.com/yuxng/YCB_Video_toolbox).
* **trained_models**
  * **trained_models/ycb**: Checkpoints for the YCB_Video dataset.
  * **trained_models/linemod**: Checkpoints for the LineMOD dataset.
* **lib**
  * **lib/loss.py**: Loss calculation for the DenseFusion model.
  * **lib/loss_refiner.py**: Loss calculation for the iterative refinement model.
  * **lib/transformations.py**: [Transformation Function Library](https://www.lfd.uci.edu/~gohlke/code/transformations.py.html).
  * **lib/network.py**: Network architecture.
  * **lib/extractors.py**: Encoder network architecture adapted from [pspnet-pytorch](https://github.com/Lextal/pspnet-pytorch).
  * **lib/pspnet.py**: Decoder network architecture.
  * **lib/utils.py**: Logger code.
  * **lib/knn/**: CUDA K-nearest-neighbour library adapted from [pytorch_knn_cuda](https://github.com/chrischoy/pytorch_knn_cuda).
* **tools**
  * **tools/_init_paths.py**: Add local path.
  * **tools/eval_ycb.py**: Evaluation code for the YCB_Video dataset.
  * **tools/eval_linemod.py**: Evaluation code for the LineMOD dataset.
  * **tools/train.py**: Training code for the YCB_Video and LineMOD datasets.
* **experiments**
  * **experiments/eval_result**
    * **experiments/eval_result/ycb**
      * **experiments/eval_result/ycb/Densefusion_wo_refine_result**: Evaluation result on the YCB_Video dataset without refinement.
      * **experiments/eval_result/ycb/Densefusion_iterative_result**: Evaluation result on the YCB_Video dataset with iterative refinement.
    * **experiments/eval_result/linemod**: Evaluation results on the LineMOD dataset with iterative refinement.
  * **experiments/logs/**: Training log files.
  * **experiments/scripts**
    * **experiments/scripts/train_ycb.sh**: Training script for the YCB_Video dataset.
    * **experiments/scripts/train_linemod.sh**: Training script for the LineMOD dataset.
    * **experiments/scripts/eval_ycb.sh**: Evaluation script for the YCB_Video dataset.
    * **experiments/scripts/eval_linemod.sh**: Evaluation script for the LineMOD dataset.
* **download.sh**: Script for downloading the YCB_Video dataset, the preprocessed LineMOD dataset and the trained checkpoints.


## Datasets

This work is tested on two 6D object pose estimation datasets:

* [YCB_Video Dataset](https://rse-lab.cs.washington.edu/projects/posecnn/): Training and testing sets follow [PoseCNN](https://arxiv.org/abs/1711.00199). The training set includes 80 training videos 0000-0047 & 0060-0091 (sampled with a gap of 7 frames in our training) and synthetic data 000000-079999. The testing set includes 2949 keyframes from the 10 testing videos 0048-0059.

* [LineMOD](http://campar.in.tum.de/Main/StefanHinterstoisser): Download the [preprocessed LineMOD dataset](https://drive.google.com/drive/folders/19ivHpaKm9dOrr12fzC8IDFczWRPFxho7) (including the testing results output by the trained vanilla SegNet used for evaluation).

Download the YCB_Video dataset, the preprocessed LineMOD dataset and the trained checkpoints (you can modify this script according to your needs):
```
./download.sh
```

## Training

* YCB_Video Dataset:
  After you have downloaded and unzipped YCB_Video_Dataset.zip and installed all the dependency packages, please run:
```
./experiments/scripts/train_ycb.sh
```
* LineMOD Dataset:
  After you have downloaded and unzipped Linemod_preprocessed.zip, please run:
```
./experiments/scripts/train_linemod.sh
```
**Training Process**: The training process contains two components: (i) training of the DenseFusion model and (ii) training of the Iterative Refinement model. In this code, the DenseFusion model is trained first. When the average testing distance (ADD for non-symmetric objects, ADD-S for symmetric objects) falls below a certain margin, training of the Iterative Refinement model starts automatically and the DenseFusion model is then fixed. You can change this margin to get a better DenseFusion result without refinement, but it will be inferior to the final result after iterative refinement. A sketch of this switching logic follows below.

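As a rough illustration of that two-stage schedule, here is a minimal, self-contained sketch; the names (`refine_margin`, `average_test_distance`) and the threshold value are placeholders, not the exact options used in `tools/train.py`:

```python
# Hedged sketch of the schedule described above, not the exact training code.
def train_one_epoch(train_refiner):
    # Stub standing in for one epoch of DenseFusion (and optionally refiner) training.
    pass

def average_test_distance():
    # Stub standing in for the ADD / ADD-S distance averaged over the test set.
    return 0.012

refine_margin = 0.013      # hypothetical threshold in metres; the real default may differ
train_refiner = False
for epoch in range(500):
    train_one_epoch(train_refiner)
    if not train_refiner and average_test_distance() < refine_margin:
        # Stage (ii): start training the Iterative Refinement model;
        # the DenseFusion estimator is frozen from this point on.
        train_refiner = True
```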
**Checkpoints and Resuming**: After every 1000 training batches, a `pose_model_current.pth` / `pose_refine_model_current.pth` checkpoint is saved. You can use it to resume training. After each testing epoch, if the average distance result is the best so far, a `pose_model_(epoch)_(best_score).pth` / `pose_model_refiner_(epoch)_(best_score).pth` checkpoint is saved. You can use it for evaluation.

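For reference, loading such a checkpoint for evaluation or resuming looks roughly like the sketch below; the `PoseNet` import and its constructor arguments are assumptions based on `lib/network.py`, which is not shown in this view:

```python
import torch
from lib.network import PoseNet   # assumed class name and module; check lib/network.py

# Assumed constructor signature: number of sampled points and number of object classes (YCB: 21).
estimator = PoseNet(num_points=1000, num_obj=21)
estimator.cuda()
# Resume from the periodically saved checkpoint (or a best-score checkpoint).
estimator.load_state_dict(torch.load('trained_models/ycb/pose_model_current.pth'))
estimator.eval()
```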
**Notice**: Training the iterative refinement model takes some time. Please be patient; the improvement comes after about 30 epochs.


* vanilla SegNet:
  Just run:
```
cd vanilla_segmentation/
python train.py --dataset_root=./datasets/ycb/YCB_Video_Dataset
```
To make the best use of the training set, several data augmentation techniques are used in this code (a short sketch follows this list):

(1) Random noise is added to the brightness, contrast and saturation of the input RGB image with the `torchvision.transforms.ColorJitter` function, where we set the function as `torchvision.transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)`.

(2) A random pose translation noise is added to the training set of the pose estimator, where we set the range of the translation noise to 3cm for both datasets.

(3) For the YCB_Video dataset, since the synthetic data does not contain a background, we randomly select real training images as the background. In each frame, we also randomly select two instance segmentation clips from another synthetic training image to mask in front of the input RGB-D image, so that more occlusion situations can be generated.

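A minimal sketch of augmentations (1) and (2), mirroring what `datasets/ycb/dataset.py` and `datasets/linemod/dataset.py` do; the dummy image and point cloud are placeholders:

```python
import random
import numpy as np
from PIL import Image
import torchvision.transforms as transforms

# (1) Colour jitter on the RGB crop, with the parameters quoted above.
trancolor = transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)
img = Image.fromarray(np.zeros((480, 640, 3), dtype=np.uint8))  # stand-in frame
img = trancolor(img)

# (2) Random translation noise of up to +/- 3 cm, applied consistently to the
# back-projected point cloud and to the ground-truth translation.
noise_trans = 0.03  # metres
add_t = np.array([random.uniform(-noise_trans, noise_trans) for _ in range(3)])
cloud = np.zeros((500, 3), dtype=np.float32)                    # stand-in point cloud
cloud = cloud + add_t
```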
## Evaluation

### Evaluation on YCB_Video Dataset
For a fair comparison, we use the same segmentation results as [PoseCNN](https://rse-lab.cs.washington.edu/projects/posecnn/) and compare with their results after ICP refinement.
Please run:
```
./experiments/scripts/eval_ycb.sh
```
This script will first download `YCB_Video_toolbox` to the root folder of this repo and then test the selected DenseFusion and Iterative Refinement models on the 2949 keyframes of the 10 testing videos in the YCB_Video dataset with the same segmentation results as PoseCNN. The result without refinement is stored in `experiments/eval_result/ycb/Densefusion_wo_refine_result` and the refined result is in `experiments/eval_result/ycb/Densefusion_iterative_result`.

After that, you can add the paths of `experiments/eval_result/ycb/Densefusion_wo_refine_result/` and `experiments/eval_result/ycb/Densefusion_iterative_result/` to `YCB_Video_toolbox/evaluate_poses_keyframe.m` and run it with [MATLAB](https://www.mathworks.com/products/matlab.html). The code `YCB_Video_toolbox/plot_accuracy_keyframe.m` can show you the comparison plot. You can easily set this up by copying the adapted code from the `replace_ycb_toolbox/` folder over the corresponding files in the `YCB_Video_toolbox/` folder. You might still need to change the path of your `YCB_Video Dataset/` in `globals.m` and copy the two result folders (`Densefusion_wo_refine_result/` and `Densefusion_iterative_result/`) into the `YCB_Video_toolbox/` folder.


### Evaluation on LineMOD Dataset
Just run:
```
./experiments/scripts/eval_linemod.sh
```
This script will test the models on the testing set of the LineMOD dataset with the masks output by the trained vanilla SegNet model. The result will be printed at the end of the execution and saved as a log in `experiments/eval_result/linemod/`.


## Results

* YCB_Video Dataset:

Quantitative evaluation results with the ADD-S metric compared to other RGB-D methods. `Ours(per-pixel)` is the result of the DenseFusion model without refinement and `Ours(iterative)` is the result with iterative refinement.

<p align="center">
  <img src ="assets/result_ycb.png" width="600" />
</p>

**Important!** Before you use these numbers to compare with your methods, please be aware of one important issue: one difficulty of testing on the YCB_Video dataset is letting the network tell the difference between the objects `051_large_clamp` and `052_extra_large_clamp`. The results of all the approaches in this table use the same segmentation masks released by PoseCNN without any detection priors, so all of them suffer a performance drop on these two objects because of the poor detection results, and this drop is also reflected in the final overall score. If you have added detection priors to your detector to distinguish these two objects, please clarify this, or do not copy the overall score for comparison experiments.

* LineMOD Dataset:

Quantitative evaluation results with the ADD metric for non-symmetric objects and ADD-S for symmetric objects (eggbox, glue) compared to other RGB-D methods. High-performance RGB methods are also listed for reference.

<p align="center">
  <img src ="assets/result_linemod.png" width="500" />
</p>

Qualitative results on the YCB_Video dataset.

<p align="center">
  <img src ="assets/compare.png" width="600" />
</p>

## Trained Checkpoints
You can download the trained DenseFusion and Iterative Refinement checkpoints for both datasets from [Link](https://drive.google.com/drive/folders/19ivHpaKm9dOrr12fzC8IDFczWRPFxho7).

## Tips for your own dataset
As you can see in this repo, the network code and the hyperparameters (lr and w) remain the same for both datasets, which means you might not need to adjust the network structure or hyperparameters much when you use this repo on your own dataset. Please make sure that the distance metric in your dataset is converted to metres, otherwise the hyperparameter w needs to be adjusted. Several useful tools, including [LabelFusion](https://github.com/RobotLocomotion/LabelFusion) and [sixd_toolkit](https://github.com/thodan/sixd_toolkit), have been tested to work well. (Please make sure to turn on depth image collection in LabelFusion when you use it.)

+
|
187 |
+
## Citations
|
188 |
+
Please cite [DenseFusion](https://sites.google.com/view/densefusion) if you use this repository in your publications:
|
189 |
+
```
|
190 |
+
@article{wang2019densefusion,
|
191 |
+
title={DenseFusion: 6D Object Pose Estimation by Iterative Dense Fusion},
|
192 |
+
author={Wang, Chen and Xu, Danfei and Zhu, Yuke and Mart{\'\i}n-Mart{\'\i}n, Roberto and Lu, Cewu and Fei-Fei, Li and Savarese, Silvio},
|
193 |
+
booktitle={Computer Vision and Pattern Recognition (CVPR)},
|
194 |
+
year={2019}
|
195 |
+
}
|
196 |
+
```
|
197 |
+
|
198 |
+
## License
|
199 |
+
Licensed under the [MIT License](LICENSE)
|
assets/compare.png
ADDED
(image stored with Git LFS)

assets/pullfig.png
ADDED
(image stored with Git LFS)

assets/result_linemod.png
ADDED
(image stored with Git LFS)

assets/result_ycb.png
ADDED
(image stored with Git LFS)
build.sh
ADDED
@@ -0,0 +1,4 @@
#!/bin/bash

#docker build --no-cache -t hri -f Dockerfile .
docker build -t dense_fusion -f Dockerfile .
datasets/linemod/dataset.py
ADDED
@@ -0,0 +1,291 @@
import torch.utils.data as data
from PIL import Image
import os
import os.path
import errno
import torch
import json
import codecs
import numpy as np
import sys
import torchvision.transforms as transforms
import argparse
import json
import time
import random
import numpy.ma as ma
import copy
import scipy.misc
import scipy.io as scio
import yaml
import cv2


class PoseDataset(data.Dataset):
    def __init__(self, mode, num, add_noise, root, noise_trans, refine):
        self.objlist = [1, 2, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15]
        self.mode = mode

        self.list_rgb = []
        self.list_depth = []
        self.list_label = []
        self.list_obj = []
        self.list_rank = []
        self.meta = {}
        self.pt = {}
        self.root = root
        self.noise_trans = noise_trans
        self.refine = refine

        item_count = 0
        for item in self.objlist:
            if self.mode == 'train':
                input_file = open('{0}/data/{1}/train.txt'.format(self.root, '%02d' % item))
            else:
                input_file = open('{0}/data/{1}/test.txt'.format(self.root, '%02d' % item))
            while 1:
                item_count += 1
                input_line = input_file.readline()
                if self.mode == 'test' and item_count % 10 != 0:
                    continue
                if not input_line:
                    break
                if input_line[-1:] == '\n':
                    input_line = input_line[:-1]
                self.list_rgb.append('{0}/data/{1}/rgb/{2}.png'.format(self.root, '%02d' % item, input_line))
                self.list_depth.append('{0}/data/{1}/depth/{2}.png'.format(self.root, '%02d' % item, input_line))
                if self.mode == 'eval':
                    self.list_label.append('{0}/segnet_results/{1}_label/{2}_label.png'.format(self.root, '%02d' % item, input_line))
                else:
                    self.list_label.append('{0}/data/{1}/mask/{2}.png'.format(self.root, '%02d' % item, input_line))

                self.list_obj.append(item)
                self.list_rank.append(int(input_line))

            meta_file = open('{0}/data/{1}/gt.yml'.format(self.root, '%02d' % item), 'r')
            self.meta[item] = yaml.load(meta_file)
            self.pt[item] = ply_vtx('{0}/models/obj_{1}.ply'.format(self.root, '%02d' % item))

            print("Object {0} buffer loaded".format(item))

        self.length = len(self.list_rgb)

        self.cam_cx = 325.26110
        self.cam_cy = 242.04899
        self.cam_fx = 572.41140
        self.cam_fy = 573.57043

        self.xmap = np.array([[j for i in range(640)] for j in range(480)])
        self.ymap = np.array([[i for i in range(640)] for j in range(480)])

        self.num = num
        self.add_noise = add_noise
        self.trancolor = transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)
        self.norm = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
        self.border_list = [-1, 40, 80, 120, 160, 200, 240, 280, 320, 360, 400, 440, 480, 520, 560, 600, 640, 680]
        self.num_pt_mesh_large = 500
        self.num_pt_mesh_small = 500
        self.symmetry_obj_idx = [7, 8]

    def __getitem__(self, index):
        img = Image.open(self.list_rgb[index])
        ori_img = np.array(img)
        depth = np.array(Image.open(self.list_depth[index]))
        label = np.array(Image.open(self.list_label[index]))
        obj = self.list_obj[index]
        rank = self.list_rank[index]

        if obj == 2:
            for i in range(0, len(self.meta[obj][rank])):
                if self.meta[obj][rank][i]['obj_id'] == 2:
                    meta = self.meta[obj][rank][i]
                    break
        else:
            meta = self.meta[obj][rank][0]

        mask_depth = ma.getmaskarray(ma.masked_not_equal(depth, 0))
        if self.mode == 'eval':
            mask_label = ma.getmaskarray(ma.masked_equal(label, np.array(255)))
        else:
            mask_label = ma.getmaskarray(ma.masked_equal(label, np.array([255, 255, 255])))[:, :, 0]

        mask = mask_label * mask_depth

        if self.add_noise:
            img = self.trancolor(img)

        img = np.array(img)[:, :, :3]
        img = np.transpose(img, (2, 0, 1))
        img_masked = img

        if self.mode == 'eval':
            rmin, rmax, cmin, cmax = get_bbox(mask_to_bbox(mask_label))
        else:
            rmin, rmax, cmin, cmax = get_bbox(meta['obj_bb'])

        img_masked = img_masked[:, rmin:rmax, cmin:cmax]
        #p_img = np.transpose(img_masked, (1, 2, 0))
        #scipy.misc.imsave('evaluation_result/{0}_input.png'.format(index), p_img)

        target_r = np.resize(np.array(meta['cam_R_m2c']), (3, 3))
        target_t = np.array(meta['cam_t_m2c'])
        add_t = np.array([random.uniform(-self.noise_trans, self.noise_trans) for i in range(3)])

        choose = mask[rmin:rmax, cmin:cmax].flatten().nonzero()[0]
        if len(choose) == 0:
            cc = torch.LongTensor([0])
            return(cc, cc, cc, cc, cc, cc)

        if len(choose) > self.num:
            c_mask = np.zeros(len(choose), dtype=int)
            c_mask[:self.num] = 1
            np.random.shuffle(c_mask)
            choose = choose[c_mask.nonzero()]
        else:
            choose = np.pad(choose, (0, self.num - len(choose)), 'wrap')

        depth_masked = depth[rmin:rmax, cmin:cmax].flatten()[choose][:, np.newaxis].astype(np.float32)
        xmap_masked = self.xmap[rmin:rmax, cmin:cmax].flatten()[choose][:, np.newaxis].astype(np.float32)
        ymap_masked = self.ymap[rmin:rmax, cmin:cmax].flatten()[choose][:, np.newaxis].astype(np.float32)
        choose = np.array([choose])

        cam_scale = 1.0
        pt2 = depth_masked / cam_scale
        pt0 = (ymap_masked - self.cam_cx) * pt2 / self.cam_fx
        pt1 = (xmap_masked - self.cam_cy) * pt2 / self.cam_fy
        cloud = np.concatenate((pt0, pt1, pt2), axis=1)
        cloud = cloud / 1000.0

        if self.add_noise:
            cloud = np.add(cloud, add_t)

        #fw = open('evaluation_result/{0}_cld.xyz'.format(index), 'w')
        #for it in cloud:
        #    fw.write('{0} {1} {2}\n'.format(it[0], it[1], it[2]))
        #fw.close()

        model_points = self.pt[obj] / 1000.0
        dellist = [j for j in range(0, len(model_points))]
        dellist = random.sample(dellist, len(model_points) - self.num_pt_mesh_small)
        model_points = np.delete(model_points, dellist, axis=0)

        #fw = open('evaluation_result/{0}_model_points.xyz'.format(index), 'w')
        #for it in model_points:
        #    fw.write('{0} {1} {2}\n'.format(it[0], it[1], it[2]))
        #fw.close()

        target = np.dot(model_points, target_r.T)
        if self.add_noise:
            target = np.add(target, target_t / 1000.0 + add_t)
            out_t = target_t / 1000.0 + add_t
        else:
            target = np.add(target, target_t / 1000.0)
            out_t = target_t / 1000.0

        #fw = open('evaluation_result/{0}_tar.xyz'.format(index), 'w')
        #for it in target:
        #    fw.write('{0} {1} {2}\n'.format(it[0], it[1], it[2]))
        #fw.close()

        return torch.from_numpy(cloud.astype(np.float32)), \
               torch.LongTensor(choose.astype(np.int32)), \
               self.norm(torch.from_numpy(img_masked.astype(np.float32))), \
               torch.from_numpy(target.astype(np.float32)), \
               torch.from_numpy(model_points.astype(np.float32)), \
               torch.LongTensor([self.objlist.index(obj)])

    def __len__(self):
        return self.length

    def get_sym_list(self):
        return self.symmetry_obj_idx

    def get_num_points_mesh(self):
        if self.refine:
            return self.num_pt_mesh_large
        else:
            return self.num_pt_mesh_small


border_list = [-1, 40, 80, 120, 160, 200, 240, 280, 320, 360, 400, 440, 480, 520, 560, 600, 640, 680]
img_width = 480
img_length = 640


def mask_to_bbox(mask):
    mask = mask.astype(np.uint8)
    contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

    x = 0
    y = 0
    w = 0
    h = 0
    for contour in contours:
        tmp_x, tmp_y, tmp_w, tmp_h = cv2.boundingRect(contour)
        if tmp_w * tmp_h > w * h:
            x = tmp_x
            y = tmp_y
            w = tmp_w
            h = tmp_h
    return [x, y, w, h]


def get_bbox(bbox):
    bbx = [bbox[1], bbox[1] + bbox[3], bbox[0], bbox[0] + bbox[2]]
    if bbx[0] < 0:
        bbx[0] = 0
    if bbx[1] >= 480:
        bbx[1] = 479
    if bbx[2] < 0:
        bbx[2] = 0
    if bbx[3] >= 640:
        bbx[3] = 639
    rmin, rmax, cmin, cmax = bbx[0], bbx[1], bbx[2], bbx[3]
    r_b = rmax - rmin
    for tt in range(len(border_list)):
        if r_b > border_list[tt] and r_b < border_list[tt + 1]:
            r_b = border_list[tt + 1]
            break
    c_b = cmax - cmin
    for tt in range(len(border_list)):
        if c_b > border_list[tt] and c_b < border_list[tt + 1]:
            c_b = border_list[tt + 1]
            break
    center = [int((rmin + rmax) / 2), int((cmin + cmax) / 2)]
    rmin = center[0] - int(r_b / 2)
    rmax = center[0] + int(r_b / 2)
    cmin = center[1] - int(c_b / 2)
    cmax = center[1] + int(c_b / 2)
    if rmin < 0:
        delt = -rmin
        rmin = 0
        rmax += delt
    if cmin < 0:
        delt = -cmin
        cmin = 0
        cmax += delt
    if rmax > 480:
        delt = rmax - 480
        rmax = 480
        rmin -= delt
    if cmax > 640:
        delt = cmax - 640
        cmax = 640
        cmin -= delt
    return rmin, rmax, cmin, cmax


def ply_vtx(path):
    f = open(path)
    assert f.readline().strip() == "ply"
    f.readline()
    f.readline()
    N = int(f.readline().split()[-1])
    while f.readline().strip() != "end_header":
        continue
    pts = []
    for _ in range(N):
        pts.append(np.float32(f.readline().split()[:3]))
    return np.array(pts)
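A hedged usage sketch of this loader: the positional arguments follow the `__init__(mode, num, add_noise, root, noise_trans, refine)` signature above, while the dataset root path and the specific values (500 sampled points, 3 cm noise) are illustrative rather than prescriptive.

```python
import torch.utils.data
from datasets.linemod.dataset import PoseDataset

# Arguments: mode, num sampled points, add_noise, dataset root, noise_trans, refine.
dataset = PoseDataset('train', 500, True,
                      './datasets/linemod/Linemod_preprocessed', 0.03, False)
loader = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=True, num_workers=4)
for cloud, choose, img, target, model_points, idx in loader:
    pass  # feed the batch to the DenseFusion estimator here
```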
datasets/linemod/dataset_config/models_info.yml
ADDED
@@ -0,0 +1,15 @@
1: {diameter: 102.09865663, min_x: -37.93430000, min_y: -38.79960000, min_z: -45.88450000, size_x: 75.86860000, size_y: 77.59920000, size_z: 91.76900000}
2: {diameter: 247.50624233, min_x: -107.83500000, min_y: -60.92790000, min_z: -109.70500000, size_x: 215.67000000, size_y: 121.85570000, size_z: 219.41000000}
3: {diameter: 167.35486092, min_x: -83.21620000, min_y: -82.65910000, min_z: -37.23640000, size_x: 166.43240000, size_y: 165.31820000, size_z: 74.47280000}
4: {diameter: 172.49224865, min_x: -68.32970000, min_y: -71.51510000, min_z: -50.24850000, size_x: 136.65940000, size_y: 143.03020000, size_z: 100.49700000}
5: {diameter: 201.40358597, min_x: -50.39580000, min_y: -90.89790000, min_z: -96.86700000, size_x: 100.79160000, size_y: 181.79580000, size_z: 193.73400000}
6: {diameter: 154.54551808, min_x: -33.50540000, min_y: -63.81650000, min_z: -58.72830000, size_x: 67.01070000, size_y: 127.63300000, size_z: 117.45660000}
7: {diameter: 124.26430816, min_x: -58.78990000, min_y: -45.75560000, min_z: -47.31120000, size_x: 117.57980000, size_y: 91.51120000, size_z: 94.62240000}
8: {diameter: 261.47178102, min_x: -114.73800000, min_y: -37.73570000, min_z: -104.00100000, size_x: 229.47600000, size_y: 75.47140000, size_z: 208.00200000}
9: {diameter: 108.99920102, min_x: -52.21460000, min_y: -38.70380000, min_z: -42.84850000, size_x: 104.42920000, size_y: 77.40760000, size_z: 85.69700000}
10: {diameter: 164.62758848, min_x: -75.09230000, min_y: -53.53750000, min_z: -34.62070000, size_x: 150.18460000, size_y: 107.07500000, size_z: 69.24140000}
11: {diameter: 175.88933422, min_x: -18.36050000, min_y: -38.93300000, min_z: -86.40790000, size_x: 36.72110000, size_y: 77.86600000, size_z: 172.81580000}
12: {diameter: 145.54287471, min_x: -50.44390000, min_y: -54.24850000, min_z: -45.40000000, size_x: 100.88780000, size_y: 108.49700000, size_z: 90.80000000}
13: {diameter: 278.07811733, min_x: -129.11300000, min_y: -59.24100000, min_z: -70.56620000, size_x: 258.22600000, size_y: 118.48210000, size_z: 141.13240000}
14: {diameter: 282.60129399, min_x: -101.57300000, min_y: -58.87630000, min_z: -106.55800000, size_x: 203.14600000, size_y: 117.75250000, size_z: 213.11600000}
15: {diameter: 212.35825148, min_x: -46.95910000, min_y: -73.71670000, min_z: -92.37370000, size_x: 93.91810000, size_y: 147.43340000, size_z: 184.74740000}
datasets/ycb/dataset.py
ADDED
@@ -0,0 +1,289 @@
import torch.utils.data as data
from PIL import Image
import os
import os.path
import torch
import numpy as np
import torchvision.transforms as transforms
import argparse
import time
import random
from lib.transformations import quaternion_from_euler, euler_matrix, random_quaternion, quaternion_matrix
import numpy.ma as ma
import copy
import scipy.misc
import scipy.io as scio


class PoseDataset(data.Dataset):
    def __init__(self, mode, num_pt, add_noise, root, noise_trans, refine):
        if mode == 'train':
            self.path = 'datasets/ycb/dataset_config/train_data_list.txt'
        elif mode == 'test':
            self.path = 'datasets/ycb/dataset_config/test_data_list.txt'
        self.num_pt = num_pt
        self.root = root
        self.add_noise = add_noise
        self.noise_trans = noise_trans

        self.list = []
        self.real = []
        self.syn = []
        input_file = open(self.path)
        while 1:
            input_line = input_file.readline()
            if not input_line:
                break
            if input_line[-1:] == '\n':
                input_line = input_line[:-1]
            if input_line[:5] == 'data/':
                self.real.append(input_line)
            else:
                self.syn.append(input_line)
            self.list.append(input_line)
        input_file.close()

        self.length = len(self.list)
        self.len_real = len(self.real)
        self.len_syn = len(self.syn)

        class_file = open('datasets/ycb/dataset_config/classes.txt')
        class_id = 1
        self.cld = {}
        while 1:
            class_input = class_file.readline()
            if not class_input:
                break

            input_file = open('{0}/models/{1}/points.xyz'.format(self.root, class_input[:-1]))
            self.cld[class_id] = []
            while 1:
                input_line = input_file.readline()
                if not input_line:
                    break
                input_line = input_line[:-1].split(' ')
                self.cld[class_id].append([float(input_line[0]), float(input_line[1]), float(input_line[2])])
            self.cld[class_id] = np.array(self.cld[class_id])
            input_file.close()

            class_id += 1

        self.cam_cx_1 = 312.9869
        self.cam_cy_1 = 241.3109
        self.cam_fx_1 = 1066.778
        self.cam_fy_1 = 1067.487

        self.cam_cx_2 = 323.7872
        self.cam_cy_2 = 279.6921
        self.cam_fx_2 = 1077.836
        self.cam_fy_2 = 1078.189

        self.xmap = np.array([[j for i in range(640)] for j in range(480)])
        self.ymap = np.array([[i for i in range(640)] for j in range(480)])

        self.trancolor = transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)
        self.noise_img_loc = 0.0
        self.noise_img_scale = 7.0
        self.minimum_num_pt = 50
        self.norm = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
        self.symmetry_obj_idx = [12, 15, 18, 19, 20]
        self.num_pt_mesh_small = 500
        self.num_pt_mesh_large = 2600
        self.refine = refine
        self.front_num = 2

        print(len(self.list))

    def __getitem__(self, index):
        img = Image.open('{0}/{1}-color.png'.format(self.root, self.list[index]))
        depth = np.array(Image.open('{0}/{1}-depth.png'.format(self.root, self.list[index])))
        label = np.array(Image.open('{0}/{1}-label.png'.format(self.root, self.list[index])))
        meta = scio.loadmat('{0}/{1}-meta.mat'.format(self.root, self.list[index]))

        if self.list[index][:8] != 'data_syn' and int(self.list[index][5:9]) >= 60:
            cam_cx = self.cam_cx_2
            cam_cy = self.cam_cy_2
            cam_fx = self.cam_fx_2
            cam_fy = self.cam_fy_2
        else:
            cam_cx = self.cam_cx_1
            cam_cy = self.cam_cy_1
            cam_fx = self.cam_fx_1
            cam_fy = self.cam_fy_1

        mask_back = ma.getmaskarray(ma.masked_equal(label, 0))

        add_front = False
        if self.add_noise:
            for k in range(5):
                seed = random.choice(self.syn)
                front = np.array(self.trancolor(Image.open('{0}/{1}-color.png'.format(self.root, seed)).convert("RGB")))
                front = np.transpose(front, (2, 0, 1))
                f_label = np.array(Image.open('{0}/{1}-label.png'.format(self.root, seed)))
                front_label = np.unique(f_label).tolist()[1:]
                if len(front_label) < self.front_num:
                    continue
                front_label = random.sample(front_label, self.front_num)
                for f_i in front_label:
                    mk = ma.getmaskarray(ma.masked_not_equal(f_label, f_i))
                    if f_i == front_label[0]:
                        mask_front = mk
                    else:
                        mask_front = mask_front * mk
                t_label = label * mask_front
                if len(t_label.nonzero()[0]) > 1000:
                    label = t_label
                    add_front = True
                    break

        obj = meta['cls_indexes'].flatten().astype(np.int32)

        while 1:
            idx = np.random.randint(0, len(obj))
            mask_depth = ma.getmaskarray(ma.masked_not_equal(depth, 0))
            mask_label = ma.getmaskarray(ma.masked_equal(label, obj[idx]))
            mask = mask_label * mask_depth
            if len(mask.nonzero()[0]) > self.minimum_num_pt:
                break

        if self.add_noise:
            img = self.trancolor(img)

        rmin, rmax, cmin, cmax = get_bbox(mask_label)
        img = np.transpose(np.array(img)[:, :, :3], (2, 0, 1))[:, rmin:rmax, cmin:cmax]

        if self.list[index][:8] == 'data_syn':
            seed = random.choice(self.real)
            back = np.array(self.trancolor(Image.open('{0}/{1}-color.png'.format(self.root, seed)).convert("RGB")))
            back = np.transpose(back, (2, 0, 1))[:, rmin:rmax, cmin:cmax]
            img_masked = back * mask_back[rmin:rmax, cmin:cmax] + img
        else:
            img_masked = img

        if self.add_noise and add_front:
            img_masked = img_masked * mask_front[rmin:rmax, cmin:cmax] + front[:, rmin:rmax, cmin:cmax] * ~(mask_front[rmin:rmax, cmin:cmax])

        if self.list[index][:8] == 'data_syn':
            img_masked = img_masked + np.random.normal(loc=0.0, scale=7.0, size=img_masked.shape)

        # p_img = np.transpose(img_masked, (1, 2, 0))
        # scipy.misc.imsave('temp/{0}_input.png'.format(index), p_img)
        # scipy.misc.imsave('temp/{0}_label.png'.format(index), mask[rmin:rmax, cmin:cmax].astype(np.int32))

        target_r = meta['poses'][:, :, idx][:, 0:3]
        target_t = np.array([meta['poses'][:, :, idx][:, 3:4].flatten()])
        add_t = np.array([random.uniform(-self.noise_trans, self.noise_trans) for i in range(3)])

        choose = mask[rmin:rmax, cmin:cmax].flatten().nonzero()[0]
        if len(choose) > self.num_pt:
            c_mask = np.zeros(len(choose), dtype=int)
            c_mask[:self.num_pt] = 1
            np.random.shuffle(c_mask)
            choose = choose[c_mask.nonzero()]
        else:
            choose = np.pad(choose, (0, self.num_pt - len(choose)), 'wrap')

        depth_masked = depth[rmin:rmax, cmin:cmax].flatten()[choose][:, np.newaxis].astype(np.float32)
        xmap_masked = self.xmap[rmin:rmax, cmin:cmax].flatten()[choose][:, np.newaxis].astype(np.float32)
        ymap_masked = self.ymap[rmin:rmax, cmin:cmax].flatten()[choose][:, np.newaxis].astype(np.float32)
        choose = np.array([choose])

        cam_scale = meta['factor_depth'][0][0]
        pt2 = depth_masked / cam_scale
        pt0 = (ymap_masked - cam_cx) * pt2 / cam_fx
        pt1 = (xmap_masked - cam_cy) * pt2 / cam_fy
        cloud = np.concatenate((pt0, pt1, pt2), axis=1)
        if self.add_noise:
            cloud = np.add(cloud, add_t)

        # fw = open('temp/{0}_cld.xyz'.format(index), 'w')
        # for it in cloud:
        #     fw.write('{0} {1} {2}\n'.format(it[0], it[1], it[2]))
        # fw.close()

        dellist = [j for j in range(0, len(self.cld[obj[idx]]))]
        if self.refine:
            dellist = random.sample(dellist, len(self.cld[obj[idx]]) - self.num_pt_mesh_large)
        else:
            dellist = random.sample(dellist, len(self.cld[obj[idx]]) - self.num_pt_mesh_small)
        model_points = np.delete(self.cld[obj[idx]], dellist, axis=0)

        # fw = open('temp/{0}_model_points.xyz'.format(index), 'w')
        # for it in model_points:
        #     fw.write('{0} {1} {2}\n'.format(it[0], it[1], it[2]))
        # fw.close()

        target = np.dot(model_points, target_r.T)
        if self.add_noise:
            target = np.add(target, target_t + add_t)
        else:
            target = np.add(target, target_t)

        # fw = open('temp/{0}_tar.xyz'.format(index), 'w')
        # for it in target:
        #     fw.write('{0} {1} {2}\n'.format(it[0], it[1], it[2]))
        # fw.close()

        return torch.from_numpy(cloud.astype(np.float32)), \
               torch.LongTensor(choose.astype(np.int32)), \
               self.norm(torch.from_numpy(img_masked.astype(np.float32))), \
               torch.from_numpy(target.astype(np.float32)), \
               torch.from_numpy(model_points.astype(np.float32)), \
               torch.LongTensor([int(obj[idx]) - 1])

    def __len__(self):
        return self.length

    def get_sym_list(self):
        return self.symmetry_obj_idx

    def get_num_points_mesh(self):
        if self.refine:
            return self.num_pt_mesh_large
        else:
            return self.num_pt_mesh_small


border_list = [-1, 40, 80, 120, 160, 200, 240, 280, 320, 360, 400, 440, 480, 520, 560, 600, 640, 680]
img_width = 480
img_length = 640

def get_bbox(label):
    rows = np.any(label, axis=1)
    cols = np.any(label, axis=0)
    rmin, rmax = np.where(rows)[0][[0, -1]]
    cmin, cmax = np.where(cols)[0][[0, -1]]
    rmax += 1
    cmax += 1
    r_b = rmax - rmin
    for tt in range(len(border_list)):
        if r_b > border_list[tt] and r_b < border_list[tt + 1]:
            r_b = border_list[tt + 1]
            break
    c_b = cmax - cmin
    for tt in range(len(border_list)):
        if c_b > border_list[tt] and c_b < border_list[tt + 1]:
            c_b = border_list[tt + 1]
            break
    center = [int((rmin + rmax) / 2), int((cmin + cmax) / 2)]
    rmin = center[0] - int(r_b / 2)
    rmax = center[0] + int(r_b / 2)
    cmin = center[1] - int(c_b / 2)
    cmax = center[1] + int(c_b / 2)
    if rmin < 0:
        delt = -rmin
        rmin = 0
        rmax += delt
    if cmin < 0:
        delt = -cmin
        cmin = 0
        cmax += delt
    if rmax > img_width:
        delt = rmax - img_width
        rmax = img_width
        rmin -= delt
    if cmax > img_length:
        delt = cmax - img_length
        cmax = img_length
        cmin -= delt
    return rmin, rmax, cmin, cmax
datasets/ycb/dataset_config/classes.txt
ADDED
@@ -0,0 +1,21 @@
002_master_chef_can
003_cracker_box
004_sugar_box
005_tomato_soup_can
006_mustard_bottle
007_tuna_fish_can
008_pudding_box
009_gelatin_box
010_potted_meat_can
011_banana
019_pitcher_base
021_bleach_cleanser
024_bowl
025_mug
035_power_drill
036_wood_block
037_scissors
040_large_marker
051_large_clamp
052_extra_large_clamp
061_foam_brick
datasets/ycb/dataset_config/test_data_list.txt
ADDED
@@ -0,0 +1,2949 @@
|
1 |
+
data/0048/000001
|
2 |
+
data/0048/000036
|
3 |
+
data/0048/000047
|
4 |
+
data/0048/000083
|
5 |
+
data/0048/000112
|
6 |
+
data/0048/000135
|
7 |
+
data/0048/000155
|
8 |
+
data/0048/000168
|
9 |
+
data/0048/000181
|
10 |
+
data/0048/000204
|
11 |
+
data/0048/000220
|
12 |
+
data/0048/000235
|
13 |
+
data/0048/000252
|
14 |
+
data/0048/000257
|
15 |
+
data/0048/000261
|
16 |
+
data/0048/000265
|
17 |
+
data/0048/000277
|
18 |
+
data/0048/000292
|
19 |
+
data/0048/000294
|
20 |
+
data/0048/000310
|
21 |
+
data/0048/000311
|
22 |
+
data/0048/000322
|
23 |
+
data/0048/000326
|
24 |
+
data/0048/000335
|
25 |
+
data/0048/000337
|
26 |
+
data/0048/000353
|
27 |
+
data/0048/000356
|
28 |
+
data/0048/000364
|
29 |
+
data/0048/000373
|
30 |
+
data/0048/000380
|
31 |
+
data/0048/000388
|
32 |
+
data/0048/000398
|
33 |
+
data/0048/000401
|
34 |
+
data/0048/000404
|
35 |
+
data/0048/000415
|
36 |
+
data/0048/000422
|
37 |
+
data/0048/000435
|
38 |
+
data/0048/000448
|
39 |
+
data/0048/000464
|
40 |
+
data/0048/000476
|
41 |
+
data/0048/000485
|
42 |
+
data/0048/000494
|
43 |
+
data/0048/000510
|
44 |
+
data/0048/000516
|
45 |
+
data/0048/000529
|
46 |
+
data/0048/000541
|
47 |
+
data/0048/000549
|
48 |
+
data/0048/000556
|
49 |
+
data/0048/000567
|
50 |
+
data/0048/000573
|
51 |
+
data/0048/000581
|
52 |
+
data/0048/000592
|
53 |
+
data/0048/000605
|
54 |
+
data/0048/000614
|
55 |
+
data/0048/000626
|
56 |
+
data/0048/000636
|
57 |
+
data/0048/000648
|
58 |
+
data/0048/000653
|
59 |
+
data/0048/000656
|
60 |
+
data/0048/000664
|
61 |
+
data/0048/000672
|
62 |
+
data/0048/000685
|
63 |
+
data/0048/000694
|
64 |
+
data/0048/000733
|
65 |
+
data/0048/000734
|
66 |
+
data/0048/000756
|
67 |
+
data/0048/000779
|
68 |
+
data/0048/000786
|
69 |
+
data/0048/000803
|
70 |
+
data/0048/000806
|
71 |
+
data/0048/000818
|
72 |
+
data/0048/000820
|
73 |
+
data/0048/000821
|
74 |
+
data/0048/000834
|
75 |
+
data/0048/000836
|
76 |
+
data/0048/000854
|
77 |
+
data/0048/000855
|
78 |
+
data/0048/000858
|
79 |
+
data/0048/000862
|
80 |
+
data/0048/000871
|
81 |
+
data/0048/000874
|
82 |
+
data/0048/000884
|
83 |
+
data/0048/000892
|
84 |
+
data/0048/000899
|
85 |
+
data/0048/000902
|
86 |
+
data/0048/000905
|
87 |
+
data/0048/000907
|
88 |
+
data/0048/000913
|
89 |
+
data/0048/000919
|
90 |
+
data/0048/000927
|
91 |
+
data/0048/000937
|
92 |
+
data/0048/000942
|
93 |
+
data/0048/000943
|
94 |
+
data/0048/000950
|
95 |
+
data/0048/000958
|
96 |
+
data/0048/000959
|
97 |
+
data/0048/000971
|
98 |
+
data/0048/000991
|
99 |
+
data/0048/000996
|
100 |
+
data/0048/001002
|
101 |
+
data/0048/001012
|
102 |
+
data/0048/001024
|
103 |
+
data/0048/001027
|
104 |
+
data/0048/001059
|
105 |
+
data/0048/001065
|
106 |
+
data/0048/001074
|
107 |
+
data/0048/001075
|
108 |
+
data/0048/001083
|
109 |
+
data/0048/001087
|
110 |
+
data/0048/001090
|
111 |
+
data/0048/001094
|
112 |
+
data/0048/001097
|
113 |
+
data/0048/001103
|
114 |
+
data/0048/001104
|
115 |
+
data/0048/001107
|
116 |
+
data/0048/001113
|
117 |
+
data/0048/001119
|
118 |
+
data/0048/001120
|
119 |
+
data/0048/001122
|
120 |
+
data/0048/001124
|
121 |
+
data/0048/001126
|
122 |
+
data/0048/001128
|
123 |
+
data/0048/001132
|
124 |
+
data/0048/001133
|
125 |
+
data/0048/001135
|
126 |
+
data/0048/001137
|
127 |
+
data/0048/001146
|
128 |
+
data/0048/001147
|
129 |
+
data/0048/001149
|
130 |
+
data/0048/001151
|
131 |
+
data/0048/001154
|
132 |
+
data/0048/001155
|
133 |
+
data/0048/001156
|
134 |
+
data/0048/001157
|
135 |
+
data/0048/001158
|
136 |
+
data/0048/001160
|
137 |
+
data/0048/001165
|
138 |
+
data/0048/001168
|
139 |
+
data/0048/001169
|
140 |
+
data/0048/001170
|
141 |
+
data/0048/001171
|
142 |
+
data/0048/001172
|
143 |
+
data/0048/001173
|
144 |
+
data/0048/001175
|
145 |
+
data/0048/001177
|
146 |
+
data/0048/001195
|
147 |
+
data/0048/001199
|
148 |
+
data/0048/001202
|
149 |
+
data/0048/001205
|
150 |
+
data/0048/001207
|
151 |
+
data/0048/001211
|
152 |
+
data/0048/001212
|
153 |
+
data/0048/001219
|
154 |
+
data/0048/001226
|
155 |
+
data/0048/001231
|
156 |
+
data/0048/001233
|
157 |
+
data/0048/001235
|
158 |
+
data/0048/001251
|
159 |
+
data/0048/001261
|
160 |
+
data/0048/001267
|
161 |
+
data/0048/001275
|
162 |
+
data/0048/001289
|
163 |
+
data/0048/001296
|
164 |
+
data/0048/001299
|
165 |
+
data/0048/001300
|
166 |
+
data/0048/001301
|
167 |
+
data/0048/001302
|
168 |
+
data/0048/001303
|
169 |
+
data/0048/001304
|
170 |
+
data/0048/001305
|
171 |
+
data/0048/001307
|
172 |
+
data/0048/001308
|
173 |
+
data/0048/001309
|
174 |
+
data/0048/001310
|
175 |
+
data/0048/001313
|
176 |
+
data/0048/001319
|
177 |
+
data/0048/001328
|
178 |
+
data/0048/001330
|
179 |
+
data/0048/001333
|
180 |
+
data/0048/001334
|
181 |
+
data/0048/001337
|
182 |
+
data/0048/001338
|
183 |
+
data/0048/001343
|
184 |
+
data/0048/001367
|
185 |
+
data/0048/001397
|
186 |
+
data/0048/001405
|
187 |
+
data/0048/001412
|
188 |
+
data/0048/001413
|
189 |
+
data/0048/001426
|
190 |
+
data/0048/001442
|
191 |
+
data/0048/001457
|
192 |
+
data/0048/001478
|
193 |
+
data/0048/001493
|
194 |
+
data/0048/001494
|
195 |
+
data/0048/001513
|
196 |
+
data/0048/001533
|
197 |
+
data/0048/001534
|
198 |
+
data/0048/001539
|
199 |
+
data/0048/001540
|
200 |
+
data/0048/001542
|
201 |
+
data/0048/001543
|
202 |
+
data/0048/001568
|
203 |
+
data/0048/001569
|
204 |
+
data/0048/001571
|
205 |
+
data/0048/001572
|
206 |
+
data/0048/001576
|
207 |
+
data/0048/001577
|
208 |
+
data/0048/001579
|
209 |
+
data/0048/001580
|
210 |
+
data/0048/001583
|
211 |
+
data/0048/001586
|
212 |
+
data/0048/001587
|
213 |
+
data/0048/001588
|
214 |
+
data/0048/001589
|
215 |
+
data/0048/001595
|
216 |
+
data/0048/001597
|
217 |
+
data/0048/001602
|
218 |
+
data/0048/001603
|
219 |
+
data/0048/001604
|
220 |
+
data/0048/001605
|
221 |
+
data/0048/001608
|
222 |
+
data/0048/001609
|
223 |
+
data/0048/001611
|
224 |
+
data/0048/001617
|
225 |
+
data/0048/001619
|
226 |
+
data/0048/001621
|
227 |
+
data/0048/001622
|
228 |
+
data/0048/001623
|
229 |
+
data/0048/001624
|
230 |
+
data/0048/001626
|
231 |
+
data/0048/001627
|
232 |
+
data/0048/001629
|
233 |
+
data/0048/001631
|
234 |
+
data/0048/001639
|
235 |
+
data/0048/001641
|
236 |
+
data/0048/001643
|
237 |
+
data/0048/001644
|
238 |
+
data/0048/001645
|
239 |
+
data/0048/001647
|
240 |
+
data/0048/001648
|
241 |
+
data/0048/001649
|
242 |
+
data/0048/001654
|
243 |
+
data/0048/001656
|
244 |
+
data/0048/001658
|
245 |
+
data/0048/001666
|
246 |
+
data/0048/001667
|
247 |
+
data/0048/001669
|
248 |
+
data/0048/001671
|
249 |
+
data/0048/001672
|
250 |
+
data/0048/001674
|
251 |
+
data/0048/001675
|
252 |
+
data/0048/001676
|
253 |
+
data/0048/001678
|
254 |
+
data/0048/001679
|
255 |
+
data/0048/001682
|
256 |
+
data/0048/001684
|
257 |
+
data/0048/001685
|
258 |
+
data/0048/001690
|
259 |
+
data/0048/001691
|
260 |
+
data/0048/001693
|
261 |
+
data/0048/001696
|
262 |
+
data/0048/001699
|
263 |
+
data/0048/001701
|
264 |
+
data/0048/001705
|
265 |
+
data/0048/001707
|
266 |
+
data/0048/001708
|
267 |
+
data/0048/001709
|
268 |
+
data/0048/001714
|
269 |
+
data/0048/001715
|
270 |
+
data/0048/001717
|
271 |
+
data/0048/001719
|
272 |
+
data/0048/001722
|
273 |
+
data/0048/001723
|
274 |
+
data/0048/001724
|
275 |
+
data/0048/001725
|
276 |
+
data/0048/001730
|
277 |
+
data/0048/001731
|
278 |
+
data/0048/001733
|
279 |
+
data/0048/001735
|
280 |
+
data/0048/001742
|
281 |
+
data/0048/001744
|
282 |
+
data/0048/001745
|
283 |
+
data/0048/001749
|
284 |
+
data/0048/001750
|
285 |
+
data/0048/001753
|
286 |
+
data/0048/001754
|
287 |
+
data/0048/001759
|
288 |
+
data/0048/001760
|
289 |
+
data/0048/001763
|
290 |
+
data/0048/001765
|
291 |
+
data/0048/001766
|
292 |
+
data/0048/001768
|
293 |
+
data/0048/001769
|
294 |
+
data/0048/001771
|
295 |
+
data/0048/001772
|
296 |
+
data/0048/001773
|
297 |
+
data/0048/001776
|
298 |
+
data/0048/001777
|
299 |
+
data/0048/001779
|
300 |
+
data/0048/001780
|
301 |
+
data/0048/001783
|
302 |
+
data/0048/001784
|
303 |
+
data/0048/001785
|
304 |
+
data/0048/001786
|
305 |
+
data/0048/001787
|
306 |
+
data/0048/001789
|
307 |
+
data/0048/001794
|
308 |
+
data/0048/001795
|
309 |
+
data/0048/001796
|
310 |
+
data/0048/001805
|
311 |
+
data/0048/001806
|
312 |
+
data/0048/001808
|
313 |
+
data/0048/001810
|
314 |
+
data/0048/001814
|
315 |
+
data/0048/001819
|
316 |
+
data/0048/001822
|
317 |
+
data/0048/001823
|
318 |
+
data/0048/001825
|
319 |
+
data/0048/001826
|
320 |
+
data/0048/001831
|
321 |
+
data/0048/001834
|
322 |
+
data/0048/001838
|
323 |
+
data/0048/001840
|
324 |
+
data/0048/001841
|
325 |
+
data/0048/001842
|
326 |
+
data/0048/001849
|
327 |
+
data/0048/001854
|
328 |
+
data/0048/001859
|
329 |
+
data/0048/001862
|
330 |
+
data/0048/001866
|
331 |
+
data/0048/001870
|
332 |
+
data/0048/001871
|
333 |
+
data/0048/001872
|
334 |
+
data/0048/001874
|
335 |
+
data/0048/001879
|
336 |
+
data/0048/001881
|
337 |
+
data/0048/001884
|
338 |
+
data/0048/001886
|
339 |
+
data/0048/001888
|
340 |
+
data/0048/001890
|
341 |
+
data/0048/001892
|
342 |
+
data/0048/001893
|
343 |
+
data/0048/001894
|
344 |
+
data/0048/001897
|
345 |
+
data/0048/001898
|
346 |
+
data/0048/001901
|
347 |
+
data/0048/001902
|
348 |
+
data/0048/001907
|
349 |
+
data/0048/001913
|
350 |
+
data/0048/001917
|
351 |
+
data/0048/001921
|
352 |
+
data/0048/001931
|
353 |
+
data/0048/001932
|
354 |
+
data/0048/001934
|
355 |
+
data/0048/001939
|
356 |
+
data/0048/001942
|
357 |
+
data/0048/001945
|
358 |
+
data/0048/001947
|
359 |
+
data/0048/001949
|
360 |
+
data/0048/001954
|
361 |
+
data/0048/001957
|
362 |
+
data/0048/001964
|
363 |
+
data/0048/001965
|
364 |
+
data/0048/001983
|
365 |
+
data/0048/001985
|
366 |
+
data/0048/001988
|
367 |
+
data/0048/001991
|
368 |
+
data/0048/001994
|
369 |
+
data/0048/002004
|
370 |
+
data/0048/002007
|
371 |
+
data/0048/002015
|
372 |
+
data/0048/002016
|
373 |
+
data/0048/002021
|
374 |
+
data/0048/002029
|
375 |
+
data/0048/002037
|
376 |
+
data/0048/002039
|
377 |
+
data/0048/002040
|
378 |
+
data/0048/002050
|
379 |
+
data/0048/002051
|
380 |
+
data/0048/002066
|
381 |
+
data/0048/002069
|
382 |
+
data/0048/002070
|
383 |
+
data/0048/002083
|
384 |
+
data/0048/002090
|
385 |
+
data/0048/002104
|
386 |
+
data/0048/002118
|
387 |
+
data/0048/002133
|
388 |
+
data/0048/002144
|
389 |
+
data/0048/002160
|
390 |
+
data/0048/002173
|
391 |
+
data/0048/002175
|
392 |
+
data/0048/002192
|
393 |
+
data/0048/002198
|
394 |
+
data/0048/002217
|
395 |
+
data/0049/000001
|
396 |
+
data/0049/000036
|
397 |
+
data/0049/000047
|
398 |
+
data/0049/000058
|
399 |
+
data/0049/000065
|
400 |
+
data/0049/000073
|
401 |
+
data/0049/000091
|
402 |
+
data/0049/000109
|
403 |
+
data/0049/000125
|
404 |
+
data/0049/000148
|
405 |
+
data/0049/000170
|
406 |
+
data/0049/000187
|
407 |
+
data/0049/000206
|
408 |
+
data/0049/000222
|
409 |
+
data/0049/000230
|
410 |
+
data/0049/000231
|
411 |
+
data/0049/000235
|
412 |
+
data/0049/000239
|
413 |
+
data/0049/000242
|
414 |
+
data/0049/000246
|
415 |
+
data/0049/000247
|
416 |
+
data/0049/000251
|
417 |
+
data/0049/000252
|
418 |
+
data/0049/000261
|
419 |
+
data/0049/000262
|
420 |
+
data/0049/000270
|
421 |
+
data/0049/000273
|
422 |
+
data/0049/000282
|
423 |
+
data/0049/000289
|
424 |
+
data/0049/000295
|
425 |
+
data/0049/000304
|
426 |
+
data/0049/000310
|
427 |
+
data/0049/000315
|
428 |
+
data/0049/000316
|
429 |
+
data/0049/000324
|
430 |
+
data/0049/000326
|
431 |
+
data/0049/000333
|
432 |
+
data/0049/000335
|
433 |
+
data/0049/000348
|
434 |
+
data/0049/000352
|
435 |
+
data/0049/000357
|
436 |
+
data/0049/000370
|
437 |
+
data/0049/000376
|
438 |
+
data/0049/000394
|
439 |
+
data/0049/000405
|
440 |
+
data/0049/000422
|
441 |
+
data/0049/000427
|
442 |
+
data/0049/000457
|
443 |
+
data/0049/000461
|
444 |
+
data/0049/000470
|
445 |
+
data/0049/000474
|
446 |
+
data/0049/000485
|
447 |
+
data/0049/000489
|
448 |
+
data/0049/000509
|
449 |
+
data/0049/000511
|
450 |
+
data/0049/000521
|
451 |
+
data/0049/000526
|
452 |
+
data/0049/000537
|
453 |
+
data/0049/000542
|
454 |
+
data/0049/000552
|
455 |
+
data/0049/000558
|
456 |
+
data/0049/000564
|
457 |
+
data/0049/000570
|
458 |
+
data/0049/000576
|
459 |
+
data/0049/000591
|
460 |
+
data/0049/000594
|
461 |
+
data/0049/000617
|
462 |
+
data/0049/000629
|
463 |
+
data/0049/000631
|
464 |
+
data/0049/000651
|
465 |
+
data/0049/000669
|
466 |
+
data/0049/000671
|
467 |
+
data/0049/000693
|
468 |
+
data/0049/000695
|
469 |
+
data/0049/000711
|
470 |
+
data/0049/000714
|
471 |
+
data/0049/000735
|
472 |
+
data/0049/000751
|
473 |
+
data/0049/000762
|
474 |
+
data/0049/000769
|
475 |
+
data/0049/000783
|
476 |
+
data/0049/000785
|
477 |
+
data/0049/000798
|
478 |
+
data/0049/000802
|
479 |
+
data/0049/000811
|
480 |
+
data/0049/000817
|
481 |
+
data/0049/000825
|
482 |
+
data/0049/000833
|
483 |
+
data/0049/000840
|
484 |
+
data/0049/000849
|
485 |
+
data/0049/000858
|
486 |
+
data/0049/000884
|
487 |
+
data/0049/000900
|
488 |
+
data/0049/000914
|
489 |
+
data/0049/000949
|
490 |
+
data/0049/000951
|
491 |
+
data/0049/000981
|
492 |
+
data/0049/000983
|
493 |
+
data/0049/001009
|
494 |
+
data/0049/001024
|
495 |
+
data/0049/001036
|
496 |
+
data/0049/001052
|
497 |
+
data/0049/001067
|
498 |
+
data/0049/001095
|
499 |
+
data/0049/001102
|
500 |
+
data/0049/001130
|
501 |
+
data/0049/001144
|
502 |
+
data/0049/001150
|
503 |
+
data/0049/001157
|
504 |
+
data/0049/001168
|
505 |
+
data/0049/001172
|
506 |
+
data/0049/001179
|
507 |
+
data/0049/001187
|
508 |
+
data/0049/001190
|
509 |
+
data/0049/001205
|
510 |
+
data/0049/001222
|
511 |
+
data/0049/001229
|
512 |
+
data/0049/001234
|
513 |
+
data/0049/001247
|
514 |
+
data/0049/001250
|
515 |
+
data/0049/001262
|
516 |
+
data/0049/001266
|
517 |
+
data/0049/001269
|
518 |
+
data/0049/001280
|
519 |
+
data/0049/001283
|
520 |
+
data/0049/001304
|
521 |
+
data/0049/001305
|
522 |
+
data/0049/001319
|
523 |
+
data/0049/001321
|
524 |
+
data/0049/001340
|
525 |
+
data/0049/001344
|
526 |
+
data/0049/001353
|
527 |
+
data/0049/001356
|
528 |
+
data/0049/001391
|
529 |
+
data/0049/001408
|
530 |
+
data/0049/001413
|
531 |
+
data/0049/001429
|
532 |
+
data/0049/001431
|
533 |
+
data/0049/001441
|
534 |
+
data/0049/001446
|
535 |
+
data/0049/001461
|
536 |
+
data/0049/001466
|
537 |
+
data/0049/001474
|
538 |
+
data/0049/001477
|
539 |
+
data/0049/001488
|
540 |
+
data/0049/001491
|
541 |
+
data/0049/001499
|
542 |
+
data/0049/001515
|
543 |
+
data/0049/001518
|
544 |
+
data/0049/001554
|
545 |
+
data/0049/001557
|
546 |
+
data/0049/001565
|
547 |
+
data/0049/001568
|
548 |
+
data/0049/001575
|
549 |
+
data/0049/001582
|
550 |
+
data/0049/001587
|
551 |
+
data/0049/001592
|
552 |
+
data/0049/001595
|
553 |
+
data/0049/001614
|
554 |
+
data/0049/001615
|
555 |
+
data/0049/001618
|
556 |
+
data/0049/001631
|
557 |
+
data/0049/001641
|
558 |
+
data/0049/001644
|
559 |
+
data/0049/001655
|
560 |
+
data/0049/001671
|
561 |
+
data/0049/001677
|
562 |
+
data/0049/001682
|
563 |
+
data/0049/001688
|
564 |
+
data/0049/001699
|
565 |
+
data/0049/001704
|
566 |
+
data/0049/001707
|
567 |
+
data/0049/001719
|
568 |
+
data/0049/001732
|
569 |
+
data/0049/001743
|
570 |
+
data/0049/001759
|
571 |
+
data/0049/001760
|
572 |
+
data/0049/001773
|
573 |
+
data/0049/001776
|
574 |
+
data/0049/001784
|
575 |
+
data/0049/001789
|
576 |
+
data/0049/001796
|
577 |
+
data/0049/001804
|
578 |
+
data/0049/001816
|
579 |
+
data/0049/001828
|
580 |
+
data/0049/001842
|
581 |
+
data/0049/001848
|
582 |
+
data/0049/001854
|
583 |
+
data/0049/001857
|
584 |
+
data/0049/001869
|
585 |
+
data/0049/001875
|
586 |
+
data/0049/001881
|
587 |
+
data/0049/001886
|
588 |
+
data/0049/001889
|
589 |
+
data/0049/001902
|
590 |
+
data/0049/001909
|
591 |
+
data/0049/001926
|
592 |
+
data/0049/001934
|
593 |
+
data/0049/001936
|
594 |
+
data/0049/001937
|
595 |
+
data/0049/001968
|
596 |
+
data/0049/001969
|
597 |
+
data/0049/001988
|
598 |
+
data/0049/001994
|
599 |
+
data/0049/002012
|
600 |
+
data/0049/002014
|
601 |
+
data/0049/002035
|
602 |
+
data/0049/002037
|
603 |
+
data/0049/002043
|
604 |
+
data/0049/002061
|
605 |
+
data/0049/002064
|
606 |
+
data/0049/002089
|
607 |
+
data/0049/002097
|
608 |
+
data/0049/002119
|
609 |
+
data/0049/002123
|
610 |
+
data/0049/002146
|
611 |
+
data/0049/002151
|
612 |
+
data/0049/002173
|
613 |
+
data/0049/002196
|
614 |
+
data/0049/002207
|
615 |
+
data/0049/002219
|
616 |
+
data/0049/002235
|
617 |
+
data/0049/002275
|
618 |
+
data/0049/002287
|
619 |
+
data/0049/002315
|
620 |
+
data/0049/002337
|
621 |
+
data/0049/002365
|
622 |
+
data/0050/000001
|
623 |
+
data/0050/000041
|
624 |
+
data/0050/000055
|
625 |
+
data/0050/000076
|
626 |
+
data/0050/000099
|
627 |
+
data/0050/000111
|
628 |
+
data/0050/000122
|
629 |
+
data/0050/000142
|
630 |
+
data/0050/000181
|
631 |
+
data/0050/000204
|
632 |
+
data/0050/000222
|
633 |
+
data/0050/000231
|
634 |
+
data/0050/000237
|
635 |
+
data/0050/000241
|
636 |
+
data/0050/000249
|
637 |
+
data/0050/000252
|
638 |
+
data/0050/000263
|
639 |
+
data/0050/000265
|
640 |
+
data/0050/000273
|
641 |
+
data/0050/000275
|
642 |
+
data/0050/000290
|
643 |
+
data/0050/000293
|
644 |
+
data/0050/000303
|
645 |
+
data/0050/000315
|
646 |
+
data/0050/000327
|
647 |
+
data/0050/000337
|
648 |
+
data/0050/000345
|
649 |
+
data/0050/000347
|
650 |
+
data/0050/000363
|
651 |
+
data/0050/000366
|
652 |
+
data/0050/000381
|
653 |
+
data/0050/000386
|
654 |
+
data/0050/000405
|
655 |
+
data/0050/000413
|
656 |
+
data/0050/000420
|
657 |
+
data/0050/000437
|
658 |
+
data/0050/000438
|
659 |
+
data/0050/000464
|
660 |
+
data/0050/000466
|
661 |
+
data/0050/000483
|
662 |
+
data/0050/000505
|
663 |
+
data/0050/000509
|
664 |
+
data/0050/000558
|
665 |
+
data/0050/000588
|
666 |
+
data/0050/000605
|
667 |
+
data/0050/000610
|
668 |
+
data/0050/000620
|
669 |
+
data/0050/000623
|
670 |
+
data/0050/000629
|
671 |
+
data/0050/000633
|
672 |
+
data/0050/000640
|
673 |
+
data/0050/000642
|
674 |
+
data/0050/000644
|
675 |
+
data/0050/000651
|
676 |
+
data/0050/000653
|
677 |
+
data/0050/000660
|
678 |
+
data/0050/000662
|
679 |
+
data/0050/000671
|
680 |
+
data/0050/000681
|
681 |
+
data/0050/000682
|
682 |
+
data/0050/000701
|
683 |
+
data/0050/000712
|
684 |
+
data/0050/000721
|
685 |
+
data/0050/000722
|
686 |
+
data/0050/000735
|
687 |
+
data/0050/000737
|
688 |
+
data/0050/000751
|
689 |
+
data/0050/000757
|
690 |
+
data/0050/000769
|
691 |
+
data/0050/000775
|
692 |
+
data/0050/000786
|
693 |
+
data/0050/000790
|
694 |
+
data/0050/000798
|
695 |
+
data/0050/000802
|
696 |
+
data/0050/000811
|
697 |
+
data/0050/000822
|
698 |
+
data/0050/000830
|
699 |
+
data/0050/000831
|
700 |
+
data/0050/000846
|
701 |
+
data/0050/000850
|
702 |
+
data/0050/000876
|
703 |
+
data/0050/000887
|
704 |
+
data/0050/000891
|
705 |
+
data/0050/000902
|
706 |
+
data/0050/000905
|
707 |
+
data/0050/000916
|
708 |
+
data/0050/000918
|
709 |
+
data/0050/000923
|
710 |
+
data/0050/000926
|
711 |
+
data/0050/000928
|
712 |
+
data/0050/000943
|
713 |
+
data/0050/000961
|
714 |
+
data/0050/000963
|
715 |
+
data/0050/000978
|
716 |
+
data/0050/000979
|
717 |
+
data/0050/000988
|
718 |
+
data/0050/000994
|
719 |
+
data/0050/001000
|
720 |
+
data/0050/001008
|
721 |
+
data/0050/001014
|
722 |
+
data/0050/001023
|
723 |
+
data/0050/001026
|
724 |
+
data/0050/001044
|
725 |
+
data/0050/001052
|
726 |
+
data/0050/001058
|
727 |
+
data/0050/001066
|
728 |
+
data/0050/001077
|
729 |
+
data/0050/001094
|
730 |
+
data/0050/001098
|
731 |
+
data/0050/001105
|
732 |
+
data/0050/001113
|
733 |
+
data/0050/001125
|
734 |
+
data/0050/001130
|
735 |
+
data/0050/001145
|
736 |
+
data/0050/001150
|
737 |
+
data/0050/001169
|
738 |
+
data/0050/001189
|
739 |
+
data/0050/001192
|
740 |
+
data/0050/001209
|
741 |
+
data/0050/001216
|
742 |
+
data/0050/001242
|
743 |
+
data/0050/001246
|
744 |
+
data/0050/001263
|
745 |
+
data/0050/001266
|
746 |
+
data/0050/001280
|
747 |
+
data/0050/001284
|
748 |
+
data/0050/001297
|
749 |
+
data/0050/001305
|
750 |
+
data/0050/001324
|
751 |
+
data/0050/001335
|
752 |
+
data/0050/001346
|
753 |
+
data/0050/001350
|
754 |
+
data/0050/001364
|
755 |
+
data/0050/001371
|
756 |
+
data/0050/001389
|
757 |
+
data/0050/001391
|
758 |
+
data/0050/001402
|
759 |
+
data/0050/001412
|
760 |
+
data/0050/001414
|
761 |
+
data/0050/001421
|
762 |
+
data/0050/001433
|
763 |
+
data/0050/001446
|
764 |
+
data/0050/001448
|
765 |
+
data/0050/001455
|
766 |
+
data/0050/001457
|
767 |
+
data/0050/001461
|
768 |
+
data/0050/001462
|
769 |
+
data/0050/001466
|
770 |
+
data/0050/001470
|
771 |
+
data/0050/001475
|
772 |
+
data/0050/001480
|
773 |
+
data/0050/001485
|
774 |
+
data/0050/001492
|
775 |
+
data/0050/001499
|
776 |
+
data/0050/001509
|
777 |
+
data/0050/001514
|
778 |
+
data/0050/001526
|
779 |
+
data/0050/001542
|
780 |
+
data/0050/001564
|
781 |
+
data/0050/001592
|
782 |
+
data/0050/001593
|
783 |
+
data/0050/001613
|
784 |
+
data/0050/001629
|
785 |
+
data/0050/001656
|
786 |
+
data/0050/001658
|
787 |
+
data/0050/001669
|
788 |
+
data/0050/001678
|
789 |
+
data/0050/001695
|
790 |
+
data/0050/001708
|
791 |
+
data/0050/001711
|
792 |
+
data/0050/001713
|
793 |
+
data/0050/001714
|
794 |
+
data/0050/001718
|
795 |
+
data/0050/001719
|
796 |
+
data/0050/001733
|
797 |
+
data/0050/001743
|
798 |
+
data/0050/001745
|
799 |
+
data/0050/001756
|
800 |
+
data/0050/001778
|
801 |
+
data/0050/001782
|
802 |
+
data/0050/001800
|
803 |
+
data/0050/001828
|
804 |
+
data/0050/001844
|
805 |
+
data/0050/001849
|
806 |
+
data/0050/001850
|
807 |
+
data/0050/001866
|
808 |
+
data/0050/001868
|
809 |
+
data/0050/001874
|
810 |
+
data/0050/001893
|
811 |
+
data/0050/001897
|
812 |
+
data/0051/000001
|
813 |
+
data/0051/000035
|
814 |
+
data/0051/000056
|
815 |
+
data/0051/000083
|
816 |
+
data/0051/000100
|
817 |
+
data/0051/000116
|
818 |
+
data/0051/000138
|
819 |
+
data/0051/000169
|
820 |
+
data/0051/000197
|
821 |
+
data/0051/000208
|
822 |
+
data/0051/000219
|
823 |
+
data/0051/000237
|
824 |
+
data/0051/000240
|
825 |
+
data/0051/000249
|
826 |
+
data/0051/000263
|
827 |
+
data/0051/000275
|
828 |
+
data/0051/000284
|
829 |
+
data/0051/000295
|
830 |
+
data/0051/000304
|
831 |
+
data/0051/000313
|
832 |
+
data/0051/000318
|
833 |
+
data/0051/000335
|
834 |
+
data/0051/000339
|
835 |
+
data/0051/000347
|
836 |
+
data/0051/000353
|
837 |
+
data/0051/000366
|
838 |
+
data/0051/000368
|
839 |
+
data/0051/000371
|
840 |
+
data/0051/000387
|
841 |
+
data/0051/000389
|
842 |
+
data/0051/000402
|
843 |
+
data/0051/000407
|
844 |
+
data/0051/000417
|
845 |
+
data/0051/000421
|
846 |
+
data/0051/000431
|
847 |
+
data/0051/000439
|
848 |
+
data/0051/000444
|
849 |
+
data/0051/000454
|
850 |
+
data/0051/000464
|
851 |
+
data/0051/000475
|
852 |
+
data/0051/000482
|
853 |
+
data/0051/000497
|
854 |
+
data/0051/000501
|
855 |
+
data/0051/000515
|
856 |
+
data/0051/000520
|
857 |
+
data/0051/000532
|
858 |
+
data/0051/000546
|
859 |
+
data/0051/000553
|
860 |
+
data/0051/000571
|
861 |
+
data/0051/000582
|
862 |
+
data/0051/000601
|
863 |
+
data/0051/000611
|
864 |
+
data/0051/000618
|
865 |
+
data/0051/000636
|
866 |
+
data/0051/000637
|
867 |
+
data/0051/000649
|
868 |
+
data/0051/000654
|
869 |
+
data/0051/000669
|
870 |
+
data/0051/000672
|
871 |
+
data/0051/000675
|
872 |
+
data/0051/000681
|
873 |
+
data/0051/000685
|
874 |
+
data/0051/000694
|
875 |
+
data/0051/000698
|
876 |
+
data/0051/000713
|
877 |
+
data/0051/000721
|
878 |
+
data/0051/000736
|
879 |
+
data/0051/000740
|
880 |
+
data/0051/000756
|
881 |
+
data/0051/000762
|
882 |
+
data/0051/000773
|
883 |
+
data/0051/000777
|
884 |
+
data/0051/000780
|
885 |
+
data/0051/000789
|
886 |
+
data/0051/000793
|
887 |
+
data/0051/000798
|
888 |
+
data/0051/000803
|
889 |
+
data/0051/000814
|
890 |
+
data/0051/000819
|
891 |
+
data/0051/000834
|
892 |
+
data/0051/000840
|
893 |
+
data/0051/000848
|
894 |
+
data/0051/000852
|
895 |
+
data/0051/000864
|
896 |
+
data/0051/000868
|
897 |
+
data/0051/000893
|
898 |
+
data/0051/000897
|
899 |
+
data/0051/000912
|
900 |
+
data/0051/000916
|
901 |
+
data/0051/000929
|
902 |
+
data/0051/000930
|
903 |
+
data/0051/000939
|
904 |
+
data/0051/000943
|
905 |
+
data/0051/000946
|
906 |
+
data/0051/000961
|
907 |
+
data/0051/000964
|
908 |
+
data/0051/000982
|
909 |
+
data/0051/000986
|
910 |
+
data/0051/000997
|
911 |
+
data/0051/001005
|
912 |
+
data/0051/001014
|
913 |
+
data/0051/001021
|
914 |
+
data/0051/001038
|
915 |
+
data/0051/001045
|
916 |
+
data/0051/001057
|
917 |
+
data/0051/001063
|
918 |
+
data/0051/001076
|
919 |
+
data/0051/001080
|
920 |
+
data/0051/001091
|
921 |
+
data/0051/001092
|
922 |
+
data/0051/001110
|
923 |
+
data/0051/001111
|
924 |
+
data/0051/001149
|
925 |
+
data/0051/001150
|
926 |
+
data/0051/001160
|
927 |
+
data/0051/001182
|
928 |
+
data/0051/001220
|
929 |
+
data/0051/001251
|
930 |
+
data/0051/001275
|
931 |
+
data/0051/001316
|
932 |
+
data/0051/001330
|
933 |
+
data/0051/001339
|
934 |
+
data/0051/001366
|
935 |
+
data/0051/001368
|
936 |
+
data/0051/001393
|
937 |
+
data/0051/001399
|
938 |
+
data/0051/001424
|
939 |
+
data/0051/001431
|
940 |
+
data/0051/001445
|
941 |
+
data/0051/001452
|
942 |
+
data/0051/001471
|
943 |
+
data/0051/001474
|
944 |
+
data/0051/001497
|
945 |
+
data/0051/001499
|
946 |
+
data/0051/001518
|
947 |
+
data/0051/001521
|
948 |
+
data/0051/001528
|
949 |
+
data/0051/001531
|
950 |
+
data/0051/001536
|
951 |
+
data/0051/001545
|
952 |
+
data/0051/001566
|
953 |
+
data/0051/001571
|
954 |
+
data/0051/001584
|
955 |
+
data/0051/001588
|
956 |
+
data/0051/001603
|
957 |
+
data/0051/001611
|
958 |
+
data/0051/001617
|
959 |
+
data/0051/001628
|
960 |
+
data/0051/001629
|
961 |
+
data/0051/001645
|
962 |
+
data/0051/001650
|
963 |
+
data/0051/001673
|
964 |
+
data/0051/001674
|
965 |
+
data/0051/001687
|
966 |
+
data/0051/001693
|
967 |
+
data/0051/001698
|
968 |
+
data/0051/001699
|
969 |
+
data/0051/001700
|
970 |
+
data/0051/001708
|
971 |
+
data/0051/001709
|
972 |
+
data/0051/001715
|
973 |
+
data/0051/001717
|
974 |
+
data/0051/001729
|
975 |
+
data/0051/001734
|
976 |
+
data/0051/001749
|
977 |
+
data/0051/001753
|
978 |
+
data/0051/001767
|
979 |
+
data/0051/001780
|
980 |
+
data/0051/001788
|
981 |
+
data/0051/001803
|
982 |
+
data/0051/001819
|
983 |
+
data/0051/001833
|
984 |
+
data/0051/001867
|
985 |
+
data/0051/001883
|
986 |
+
data/0051/001914
|
987 |
+
data/0051/001924
|
988 |
+
data/0051/001927
|
989 |
+
data/0051/001932
|
990 |
+
data/0051/001968
|
991 |
+
data/0051/001990
|
992 |
+
data/0051/001996
|
993 |
+
data/0052/000001
|
994 |
+
data/0052/000021
|
995 |
+
data/0052/000034
|
996 |
+
data/0052/000039
|
997 |
+
data/0052/000048
|
998 |
+
data/0052/000052
|
999 |
+
data/0052/000057
|
1000 |
+
data/0052/000061
|
1001 |
+
data/0052/000065
|
1002 |
+
data/0052/000071
|
1003 |
+
data/0052/000080
|
1004 |
+
data/0052/000081
|
1005 |
+
data/0052/000084
|
1006 |
+
data/0052/000088
|
1007 |
+
data/0052/000092
|
1008 |
+
data/0052/000097
|
1009 |
+
data/0052/000098
|
1010 |
+
data/0052/000114
|
1011 |
+
data/0052/000116
|
1012 |
+
data/0052/000121
|
1013 |
+
data/0052/000127
|
1014 |
+
data/0052/000132
|
1015 |
+
data/0052/000136
|
1016 |
+
data/0052/000143
|
1017 |
+
data/0052/000150
|
1018 |
+
data/0052/000156
|
1019 |
+
data/0052/000164
|
1020 |
+
data/0052/000165
|
1021 |
+
data/0052/000167
|
1022 |
+
data/0052/000169
|
1023 |
+
data/0052/000170
|
1024 |
+
data/0052/000171
|
1025 |
+
data/0052/000173
|
1026 |
+
data/0052/000175
|
1027 |
+
data/0052/000178
|
1028 |
+
data/0052/000179
|
1029 |
+
data/0052/000190
|
1030 |
+
data/0052/000195
|
1031 |
+
data/0052/000202
|
1032 |
+
data/0052/000213
|
1033 |
+
data/0052/000214
|
1034 |
+
data/0052/000227
|
1035 |
+
data/0052/000236
|
1036 |
+
data/0052/000245
|
1037 |
+
data/0052/000256
|
1038 |
+
data/0052/000257
|
1039 |
+
data/0052/000259
|
1040 |
+
data/0052/000265
|
1041 |
+
data/0052/000266
|
1042 |
+
data/0052/000272
|
1043 |
+
data/0052/000275
|
1044 |
+
data/0052/000282
|
1045 |
+
data/0052/000292
|
1046 |
+
data/0052/000293
|
1047 |
+
data/0052/000304
|
1048 |
+
data/0052/000313
|
1049 |
+
data/0052/000325
|
1050 |
+
data/0052/000341
|
1051 |
+
data/0052/000343
|
1052 |
+
data/0052/000357
|
1053 |
+
data/0052/000361
|
1054 |
+
data/0052/000365
|
1055 |
+
data/0052/000385
|
1056 |
+
data/0052/000388
|
1057 |
+
data/0052/000389
|
1058 |
+
data/0052/000396
|
1059 |
+
data/0052/000403
|
1060 |
+
data/0052/000407
|
1061 |
+
data/0052/000413
|
1062 |
+
data/0052/000414
|
1063 |
+
data/0052/000418
|
1064 |
+
data/0052/000426
|
1065 |
+
data/0052/000432
|
1066 |
+
data/0052/000434
|
1067 |
+
data/0052/000442
|
1068 |
+
data/0052/000448
|
1069 |
+
data/0052/000451
|
1070 |
+
data/0052/000483
|
1071 |
+
data/0052/000484
|
1072 |
+
data/0052/000493
|
1073 |
+
data/0052/000494
|
1074 |
+
data/0052/000502
|
1075 |
+
data/0052/000511
|
1076 |
+
data/0052/000516
|
1077 |
+
data/0052/000520
|
1078 |
+
data/0052/000524
|
1079 |
+
data/0052/000526
|
1080 |
+
data/0052/000527
|
1081 |
+
data/0052/000532
|
1082 |
+
data/0052/000537
|
1083 |
+
data/0052/000539
|
1084 |
+
data/0052/000543
|
1085 |
+
data/0052/000544
|
1086 |
+
data/0052/000550
|
1087 |
+
data/0052/000556
|
1088 |
+
data/0052/000561
|
1089 |
+
data/0052/000567
|
1090 |
+
data/0052/000574
|
1091 |
+
data/0052/000575
|
1092 |
+
data/0052/000582
|
1093 |
+
data/0052/000587
|
1094 |
+
data/0052/000589
|
1095 |
+
data/0052/000593
|
1096 |
+
data/0052/000594
|
1097 |
+
data/0052/000604
|
1098 |
+
data/0052/000608
|
1099 |
+
data/0052/000622
|
1100 |
+
data/0052/000625
|
1101 |
+
data/0052/000632
|
1102 |
+
data/0052/000638
|
1103 |
+
data/0052/000643
|
1104 |
+
data/0052/000650
|
1105 |
+
data/0052/000653
|
1106 |
+
data/0052/000656
|
1107 |
+
data/0052/000662
|
1108 |
+
data/0052/000666
|
1109 |
+
data/0052/000669
|
1110 |
+
data/0052/000678
|
1111 |
+
data/0052/000688
|
1112 |
+
data/0052/000694
|
1113 |
+
data/0052/000699
|
1114 |
+
data/0052/000707
|
1115 |
+
data/0052/000714
|
1116 |
+
data/0052/000720
|
1117 |
+
data/0052/000722
|
1118 |
+
data/0052/000725
|
1119 |
+
data/0052/000732
|
1120 |
+
data/0052/000733
|
1121 |
+
data/0052/000741
|
1122 |
+
data/0052/000742
|
1123 |
+
data/0052/000745
|
1124 |
+
data/0052/000747
|
1125 |
+
data/0052/000750
|
1126 |
+
data/0052/000751
|
1127 |
+
data/0052/000752
|
1128 |
+
data/0052/000756
|
1129 |
+
data/0052/000758
|
1130 |
+
data/0052/000759
|
1131 |
+
data/0052/000760
|
1132 |
+
data/0052/000763
|
1133 |
+
data/0052/000765
|
1134 |
+
data/0052/000770
|
1135 |
+
data/0052/000771
|
1136 |
+
data/0052/000774
|
1137 |
+
data/0052/000777
|
1138 |
+
data/0052/000790
|
1139 |
+
data/0052/000799
|
1140 |
+
data/0052/000806
|
1141 |
+
data/0052/000810
|
1142 |
+
data/0052/000813
|
1143 |
+
data/0052/000815
|
1144 |
+
data/0052/000817
|
1145 |
+
data/0052/000820
|
1146 |
+
data/0052/000823
|
1147 |
+
data/0052/000825
|
1148 |
+
data/0052/000826
|
1149 |
+
data/0052/000833
|
1150 |
+
data/0052/000844
|
1151 |
+
data/0052/000853
|
1152 |
+
data/0052/000855
|
1153 |
+
data/0052/000860
|
1154 |
+
data/0052/000875
|
1155 |
+
data/0052/000880
|
1156 |
+
data/0052/000885
|
1157 |
+
data/0052/000886
|
1158 |
+
data/0052/000892
|
1159 |
+
data/0052/000897
|
1160 |
+
data/0053/000001
|
1161 |
+
data/0053/000014
|
1162 |
+
data/0053/000033
|
1163 |
+
data/0053/000043
|
1164 |
+
data/0053/000052
|
1165 |
+
data/0053/000061
|
1166 |
+
data/0053/000075
|
1167 |
+
data/0053/000084
|
1168 |
+
data/0053/000093
|
1169 |
+
data/0053/000103
|
1170 |
+
data/0053/000111
|
1171 |
+
data/0053/000118
|
1172 |
+
data/0053/000123
|
1173 |
+
data/0053/000126
|
1174 |
+
data/0053/000131
|
1175 |
+
data/0053/000135
|
1176 |
+
data/0053/000138
|
1177 |
+
data/0053/000139
|
1178 |
+
data/0053/000142
|
1179 |
+
data/0053/000145
|
1180 |
+
data/0053/000147
|
1181 |
+
data/0053/000152
|
1182 |
+
data/0053/000153
|
1183 |
+
data/0053/000155
|
1184 |
+
data/0053/000158
|
1185 |
+
data/0053/000162
|
1186 |
+
data/0053/000164
|
1187 |
+
data/0053/000165
|
1188 |
+
data/0053/000167
|
1189 |
+
data/0053/000168
|
1190 |
+
data/0053/000171
|
1191 |
+
data/0053/000176
|
1192 |
+
data/0053/000179
|
1193 |
+
data/0053/000183
|
1194 |
+
data/0053/000187
|
1195 |
+
data/0053/000190
|
1196 |
+
data/0053/000193
|
1197 |
+
data/0053/000196
|
1198 |
+
data/0053/000199
|
1199 |
+
data/0053/000204
|
1200 |
+
data/0053/000207
|
1201 |
+
data/0053/000210
|
1202 |
+
data/0053/000213
|
1203 |
+
data/0053/000216
|
1204 |
+
data/0053/000219
|
1205 |
+
data/0053/000221
|
1206 |
+
data/0053/000225
|
1207 |
+
data/0053/000227
|
1208 |
+
data/0053/000230
|
1209 |
+
data/0053/000243
|
1210 |
+
data/0053/000247
|
1211 |
+
data/0053/000250
|
1212 |
+
data/0053/000254
|
1213 |
+
data/0053/000256
|
1214 |
+
data/0053/000260
|
1215 |
+
data/0053/000263
|
1216 |
+
data/0053/000268
|
1217 |
+
data/0053/000272
|
1218 |
+
data/0053/000274
|
1219 |
+
data/0053/000281
|
1220 |
+
data/0053/000285
|
1221 |
+
data/0053/000289
|
1222 |
+
data/0053/000293
|
1223 |
+
data/0053/000295
|
1224 |
+
data/0053/000299
|
1225 |
+
data/0053/000301
|
1226 |
+
data/0053/000303
|
1227 |
+
data/0053/000308
|
1228 |
+
data/0053/000309
|
1229 |
+
data/0053/000314
|
1230 |
+
data/0053/000315
|
1231 |
+
data/0053/000316
|
1232 |
+
data/0053/000318
|
1233 |
+
data/0053/000319
|
1234 |
+
data/0053/000324
|
1235 |
+
data/0053/000328
|
1236 |
+
data/0053/000330
|
1237 |
+
data/0053/000340
|
1238 |
+
data/0053/000341
|
1239 |
+
data/0053/000345
|
1240 |
+
data/0053/000348
|
1241 |
+
data/0053/000356
|
1242 |
+
data/0053/000357
|
1243 |
+
data/0053/000364
|
1244 |
+
data/0053/000366
|
1245 |
+
data/0053/000367
|
1246 |
+
data/0053/000373
|
1247 |
+
data/0053/000374
|
1248 |
+
data/0053/000407
|
1249 |
+
data/0053/000414
|
1250 |
+
data/0053/000421
|
1251 |
+
data/0053/000425
|
1252 |
+
data/0053/000430
|
1253 |
+
data/0053/000434
|
1254 |
+
data/0053/000435
|
1255 |
+
data/0053/000439
|
1256 |
+
data/0053/000441
|
1257 |
+
data/0053/000443
|
1258 |
+
data/0053/000449
|
1259 |
+
data/0053/000452
|
1260 |
+
data/0053/000454
|
1261 |
+
data/0053/000455
|
1262 |
+
data/0053/000457
|
1263 |
+
data/0053/000458
|
1264 |
+
data/0053/000459
|
1265 |
+
data/0053/000460
|
1266 |
+
data/0053/000463
|
1267 |
+
data/0053/000467
|
1268 |
+
data/0053/000468
|
1269 |
+
data/0053/000469
|
1270 |
+
data/0053/000471
|
1271 |
+
data/0053/000472
|
1272 |
+
data/0053/000474
|
1273 |
+
data/0053/000476
|
1274 |
+
data/0053/000477
|
1275 |
+
data/0053/000479
|
1276 |
+
data/0053/000483
|
1277 |
+
data/0053/000484
|
1278 |
+
data/0053/000486
|
1279 |
+
data/0053/000490
|
1280 |
+
data/0053/000501
|
1281 |
+
data/0053/000505
|
1282 |
+
data/0053/000508
|
1283 |
+
data/0053/000509
|
1284 |
+
data/0053/000513
|
1285 |
+
data/0053/000519
|
1286 |
+
data/0053/000523
|
1287 |
+
data/0053/000526
|
1288 |
+
data/0053/000533
|
1289 |
+
data/0053/000534
|
1290 |
+
data/0053/000538
|
1291 |
+
data/0053/000539
|
1292 |
+
data/0053/000542
|
1293 |
+
data/0053/000545
|
1294 |
+
data/0053/000547
|
1295 |
+
data/0053/000550
|
1296 |
+
data/0053/000551
|
1297 |
+
data/0053/000552
|
1298 |
+
data/0053/000555
|
1299 |
+
data/0053/000556
|
1300 |
+
data/0053/000567
|
1301 |
+
data/0053/000569
|
1302 |
+
data/0053/000576
|
1303 |
+
data/0053/000579
|
1304 |
+
data/0053/000583
|
1305 |
+
data/0053/000587
|
1306 |
+
data/0053/000591
|
1307 |
+
data/0053/000592
|
1308 |
+
data/0053/000602
|
1309 |
+
data/0053/000604
|
1310 |
+
data/0053/000610
|
1311 |
+
data/0053/000616
|
1312 |
+
data/0053/000628
|
1313 |
+
data/0053/000638
|
1314 |
+
data/0053/000647
|
1315 |
+
data/0053/000655
|
1316 |
+
data/0053/000665
|
1317 |
+
data/0053/000677
|
1318 |
+
data/0053/000686
|
1319 |
+
data/0053/000687
|
1320 |
+
data/0053/000692
|
1321 |
+
data/0053/000700
|
1322 |
+
data/0053/000708
|
1323 |
+
data/0053/000716
|
1324 |
+
data/0053/000722
|
1325 |
+
data/0053/000730
|
1326 |
+
data/0053/000732
|
1327 |
+
data/0053/000735
|
1328 |
+
data/0053/000738
|
1329 |
+
data/0053/000740
|
1330 |
+
data/0053/000742
|
1331 |
+
data/0053/000752
|
1332 |
+
data/0053/000768
|
1333 |
+
data/0053/000779
|
1334 |
+
data/0053/000789
|
1335 |
+
data/0053/000797
|
1336 |
+
data/0053/000802
|
1337 |
+
data/0053/000817
|
1338 |
+
data/0053/000834
|
1339 |
+
data/0054/000001
|
1340 |
+
data/0054/000022
|
1341 |
+
data/0054/000032
|
1342 |
+
data/0054/000038
|
1343 |
+
data/0054/000043
|
1344 |
+
data/0054/000050
|
1345 |
+
data/0054/000056
|
1346 |
+
data/0054/000063
|
1347 |
+
data/0054/000069
|
1348 |
+
data/0054/000081
|
1349 |
+
data/0054/000086
|
1350 |
+
data/0054/000090
|
1351 |
+
data/0054/000092
|
1352 |
+
data/0054/000098
|
1353 |
+
data/0054/000099
|
1354 |
+
data/0054/000101
|
1355 |
+
data/0054/000105
|
1356 |
+
data/0054/000110
|
1357 |
+
data/0054/000114
|
1358 |
+
data/0054/000118
|
1359 |
+
data/0054/000121
|
1360 |
+
data/0054/000124
|
1361 |
+
data/0054/000126
|
1362 |
+
data/0054/000127
|
1363 |
+
data/0054/000130
|
1364 |
+
data/0054/000132
|
1365 |
+
data/0054/000133
|
1366 |
+
data/0054/000134
|
1367 |
+
data/0054/000135
|
1368 |
+
data/0054/000136
|
1369 |
+
data/0054/000137
|
1370 |
+
data/0054/000139
|
1371 |
+
data/0054/000140
|
1372 |
+
data/0054/000142
|
1373 |
+
data/0054/000143
|
1374 |
+
data/0054/000146
|
1375 |
+
data/0054/000147
|
1376 |
+
data/0054/000152
|
1377 |
+
data/0054/000153
|
1378 |
+
data/0054/000159
|
1379 |
+
data/0054/000161
|
1380 |
+
data/0054/000173
|
1381 |
+
data/0054/000175
|
1382 |
+
data/0054/000184
|
1383 |
+
data/0054/000186
|
1384 |
+
data/0054/000191
|
1385 |
+
data/0054/000195
|
1386 |
+
data/0054/000196
|
1387 |
+
data/0054/000202
|
1388 |
+
data/0054/000204
|
1389 |
+
data/0054/000209
|
1390 |
+
data/0054/000214
|
1391 |
+
data/0054/000223
|
1392 |
+
data/0054/000232
|
1393 |
+
data/0054/000236
|
1394 |
+
data/0054/000244
|
1395 |
+
data/0054/000249
|
1396 |
+
data/0054/000255
|
1397 |
+
data/0054/000259
|
1398 |
+
data/0054/000266
|
1399 |
+
data/0054/000269
|
1400 |
+
data/0054/000313
|
1401 |
+
data/0054/000334
|
1402 |
+
data/0054/000345
|
1403 |
+
data/0054/000350
|
1404 |
+
data/0054/000359
|
1405 |
+
data/0054/000365
|
1406 |
+
data/0054/000366
|
1407 |
+
data/0054/000376
|
1408 |
+
data/0054/000378
|
1409 |
+
data/0054/000385
|
1410 |
+
data/0054/000387
|
1411 |
+
data/0054/000390
|
1412 |
+
data/0054/000393
|
1413 |
+
data/0054/000398
|
1414 |
+
data/0054/000405
|
1415 |
+
data/0054/000411
|
1416 |
+
data/0054/000413
|
1417 |
+
data/0054/000414
|
1418 |
+
data/0054/000420
|
1419 |
+
data/0054/000423
|
1420 |
+
data/0054/000426
|
1421 |
+
data/0054/000433
|
1422 |
+
data/0054/000437
|
1423 |
+
data/0054/000445
|
1424 |
+
data/0054/000447
|
1425 |
+
data/0054/000457
|
1426 |
+
data/0054/000460
|
1427 |
+
data/0054/000470
|
1428 |
+
data/0054/000473
|
1429 |
+
data/0054/000479
|
1430 |
+
data/0054/000480
|
1431 |
+
data/0054/000483
|
1432 |
+
data/0054/000491
|
1433 |
+
data/0054/000495
|
1434 |
+
data/0054/000501
|
1435 |
+
data/0054/000504
|
1436 |
+
data/0054/000511
|
1437 |
+
data/0054/000514
|
1438 |
+
data/0054/000520
|
1439 |
+
data/0054/000524
|
1440 |
+
data/0054/000530
|
1441 |
+
data/0054/000537
|
1442 |
+
data/0054/000543
|
1443 |
+
data/0054/000560
|
1444 |
+
data/0054/000561
|
1445 |
+
data/0054/000568
|
1446 |
+
data/0054/000577
|
1447 |
+
data/0054/000583
|
1448 |
+
data/0054/000588
|
1449 |
+
data/0054/000589
|
1450 |
+
data/0054/000595
|
1451 |
+
data/0054/000598
|
1452 |
+
data/0054/000603
|
1453 |
+
data/0054/000604
|
1454 |
+
data/0054/000609
|
1455 |
+
data/0054/000611
|
1456 |
+
data/0054/000615
|
1457 |
+
data/0054/000619
|
1458 |
+
data/0054/000620
|
1459 |
+
data/0054/000624
|
1460 |
+
data/0054/000626
|
1461 |
+
data/0054/000628
|
1462 |
+
data/0054/000634
|
1463 |
+
data/0054/000636
|
1464 |
+
data/0054/000642
|
1465 |
+
data/0054/000644
|
1466 |
+
data/0054/000649
|
1467 |
+
data/0054/000653
|
1468 |
+
data/0054/000656
|
1469 |
+
data/0054/000661
|
1470 |
+
data/0054/000666
|
1471 |
+
data/0054/000672
|
1472 |
+
data/0054/000673
|
1473 |
+
data/0054/000679
|
1474 |
+
data/0054/000681
|
1475 |
+
data/0054/000687
|
1476 |
+
data/0054/000689
|
1477 |
+
data/0054/000693
|
1478 |
+
data/0054/000697
|
1479 |
+
data/0054/000701
|
1480 |
+
data/0054/000705
|
1481 |
+
data/0054/000707
|
1482 |
+
data/0054/000711
|
1483 |
+
data/0054/000715
|
1484 |
+
data/0054/000719
|
1485 |
+
data/0054/000721
|
1486 |
+
data/0054/000724
|
1487 |
+
data/0054/000725
|
1488 |
+
data/0054/000730
|
1489 |
+
data/0054/000731
|
1490 |
+
data/0054/000736
|
1491 |
+
data/0054/000741
|
1492 |
+
data/0054/000746
|
1493 |
+
data/0054/000748
|
1494 |
+
data/0054/000752
|
1495 |
+
data/0054/000756
|
1496 |
+
data/0054/000766
|
1497 |
+
data/0054/000777
|
1498 |
+
data/0054/000786
|
1499 |
+
data/0054/000800
|
1500 |
+
data/0054/000802
|
1501 |
+
data/0054/000817
|
1502 |
+
data/0054/000819
|
1503 |
+
data/0054/000830
|
1504 |
+
data/0054/000831
|
1505 |
+
data/0054/000848
|
1506 |
+
data/0054/000857
|
1507 |
+
data/0054/000866
|
1508 |
+
data/0054/000868
|
1509 |
+
data/0054/000875
|
1510 |
+
data/0054/000877
|
1511 |
+
data/0054/000902
|
1512 |
+
data/0054/000915
|
1513 |
+
data/0054/000920
|
1514 |
+
data/0054/000923
|
1515 |
+
data/0054/000932
|
1516 |
+
data/0054/000934
|
1517 |
+
data/0054/000952
|
1518 |
+
data/0054/000956
|
1519 |
+
data/0054/000975
|
1520 |
+
data/0054/000983
|
1521 |
+
data/0054/000988
|
1522 |
+
data/0054/000993
|
1523 |
+
data/0054/000998
|
1524 |
+
data/0054/001002
|
1525 |
+
data/0054/001006
|
1526 |
+
data/0054/001008
|
1527 |
+
data/0054/001011
|
1528 |
+
data/0054/001012
|
1529 |
+
data/0054/001015
|
1530 |
+
data/0054/001016
|
1531 |
+
data/0054/001017
|
1532 |
+
data/0054/001019
|
1533 |
+
data/0054/001020
|
1534 |
+
data/0054/001024
|
1535 |
+
data/0054/001025
|
1536 |
+
data/0054/001028
|
1537 |
+
data/0054/001029
|
1538 |
+
data/0054/001032
|
1539 |
+
data/0054/001033
|
1540 |
+
data/0054/001039
|
1541 |
+
data/0054/001042
|
1542 |
+
data/0054/001063
|
1543 |
+
data/0054/001068
|
1544 |
+
data/0054/001080
|
1545 |
+
data/0054/001083
|
1546 |
+
data/0054/001089
|
1547 |
+
data/0054/001096
|
1548 |
+
data/0054/001106
|
1549 |
+
data/0054/001110
|
1550 |
+
data/0054/001111
|
1551 |
+
data/0054/001123
|
1552 |
+
data/0054/001129
|
1553 |
+
data/0054/001134
|
1554 |
+
data/0054/001135
|
1555 |
+
data/0054/001139
|
1556 |
+
data/0054/001144
|
1557 |
+
data/0054/001145
|
1558 |
+
data/0054/001152
|
1559 |
+
data/0054/001153
|
1560 |
+
data/0054/001159
|
1561 |
+
data/0054/001160
|
1562 |
+
data/0054/001162
|
1563 |
+
data/0054/001166
|
1564 |
+
data/0054/001168
|
1565 |
+
data/0054/001169
|
1566 |
+
data/0054/001174
|
1567 |
+
data/0054/001175
|
1568 |
+
data/0054/001178
|
1569 |
+
data/0054/001183
|
1570 |
+
data/0054/001184
|
1571 |
+
data/0054/001191
|
1572 |
+
data/0054/001194
|
1573 |
+
data/0054/001210
|
1574 |
+
data/0054/001215
|
1575 |
+
data/0054/001232
|
1576 |
+
data/0054/001246
|
1577 |
+
data/0054/001257
|
1578 |
+
data/0054/001266
|
1579 |
+
data/0054/001277
|
1580 |
+
data/0054/001284
|
1581 |
+
data/0054/001302
|
1582 |
+
data/0054/001307
|
1583 |
+
data/0054/001313
|
1584 |
+
data/0054/001317
|
1585 |
+
data/0054/001321
|
1586 |
+
data/0054/001333
|
1587 |
+
data/0054/001348
|
1588 |
+
data/0054/001352
|
1589 |
+
data/0054/001355
|
1590 |
+
data/0054/001359
|
1591 |
+
data/0054/001361
|
1592 |
+
data/0054/001365
|
1593 |
+
data/0054/001369
|
1594 |
+
data/0054/001371
|
1595 |
+
data/0054/001385
|
1596 |
+
data/0054/001389
|
1597 |
+
data/0054/001407
|
1598 |
+
data/0054/001413
|
1599 |
+
data/0054/001438
|
1600 |
+
data/0054/001439
|
1601 |
+
data/0054/001451
|
1602 |
+
data/0054/001466
|
1603 |
+
data/0054/001491
|
1604 |
+
data/0054/001494
|
1605 |
+
data/0054/001509
|
1606 |
+
data/0054/001513
|
1607 |
+
data/0054/001515
|
1608 |
+
data/0054/001521
|
1609 |
+
data/0054/001526
|
1610 |
+
data/0054/001531
|
1611 |
+
data/0054/001539
|
1612 |
+
data/0054/001543
|
1613 |
+
data/0054/001547
|
1614 |
+
data/0054/001550
|
1615 |
+
data/0054/001557
|
1616 |
+
data/0054/001559
|
1617 |
+
data/0054/001568
|
1618 |
+
data/0054/001573
|
1619 |
+
data/0054/001580
|
1620 |
+
data/0054/001585
|
1621 |
+
data/0054/001595
|
1622 |
+
data/0054/001597
|
1623 |
+
data/0054/001605
|
1624 |
+
data/0054/001613
|
1625 |
+
data/0054/001619
|
1626 |
+
data/0054/001625
|
1627 |
+
data/0054/001631
|
1628 |
+
data/0054/001636
|
1629 |
+
data/0054/001649
+data/0054/001652
… (entries 1630 through 2949 of the added test_data_list.txt continue in the same pattern across sequences data/0054 to data/0059, ending with data/0059/001857; the diff is too long to render in full, see the raw file)
datasets/ycb/dataset_config/train_data_list.txt
ADDED
The diff for this file is too large to render.
See raw diff
download.sh
ADDED
@@ -0,0 +1,29 @@
+# Download the datasets and checkpoints
+
+if [ ! -d datasets/ycb/YCB_Video_Dataset ];then
+    echo 'Downloading the YCB-Video Dataset'
+    wget --load-cookies /tmp/cookies.txt "https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id=1if4VoEXNx9W3XCn0Y7Fp15B4GpcYbyYi' -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\1\n/p')&id=1if4VoEXNx9W3XCn0Y7Fp15B4GpcYbyYi" -O YCB_Video_Dataset.zip && rm -rf /tmp/cookies.txt \
+    && unzip YCB_Video_Dataset.zip \
+    && mv YCB_Video_Dataset/ datasets/ycb/ \
+    && rm YCB_Video_Dataset.zip
+fi
+
+if [ ! -d datasets/linemod/Linemod_preprocessed ];then
+    echo 'Downloading the preprocessed LineMOD dataset'
+    wget --load-cookies /tmp/cookies.txt "https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id=1YFUra533pxS_IHsb9tB87lLoxbcHYXt8' -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\1\n/p')&id=1YFUra533pxS_IHsb9tB87lLoxbcHYXt8" -O Linemod_preprocessed.zip && rm -rf /tmp/cookies.txt \
+    && unzip Linemod_preprocessed.zip \
+    && mv Linemod_preprocessed/ datasets/linemod/ \
+    && rm Linemod_preprocessed.zip
+fi
+
+if [ ! -d trained_checkpoints ];then
+    echo 'Downloading the trained checkpoints...'
+    wget --load-cookies /tmp/cookies.txt "https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id=1bQ9H-fyZplQoNt1qRwdIUX5_3_1pj6US' -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\1\n/p')&id=1bQ9H-fyZplQoNt1qRwdIUX5_3_1pj6US" -O trained_checkpoints.zip && rm -rf /tmp/cookies.txt \
+    && unzip trained_checkpoints.zip -x "__MACOSX/*" "*.DS_Store" "*.gitignore" -d trained_checkpoints \
+    && mv trained_checkpoints/trained*/ycb trained_checkpoints/ycb \
+    && mv trained_checkpoints/trained*/linemod trained_checkpoints/linemod \
+    && rm -r trained_checkpoints/trained*/ \
+    && rm trained_checkpoints.zip
+fi
+
+echo 'done'
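The script above only downloads archives whose unpacked directories are missing, so its end state is a fixed directory layout. A minimal Python sketch (not part of the upload; the paths are simply the ones referenced by the script) to sanity-check that layout before training or evaluation:

```python
# Sanity-check the directories download.sh is expected to produce.
import os

expected_dirs = [
    "datasets/ycb/YCB_Video_Dataset",
    "datasets/linemod/Linemod_preprocessed",
    "trained_checkpoints/ycb",
    "trained_checkpoints/linemod",
]

for d in expected_dirs:
    print(f"{d}: {'ok' if os.path.isdir(d) else 'MISSING'}")
```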
experiments/eval_result/linemod/.gitignore
ADDED
@@ -0,0 +1 @@
+*.txt
experiments/eval_result/ycb/Densefusion_iterative_result/.gitignore
ADDED
@@ -0,0 +1 @@
+*.mat
experiments/eval_result/ycb/Densefusion_wo_refine_result/.gitignore
ADDED
@@ -0,0 +1 @@
+*.mat
experiments/logs/linemod/.gitignore
ADDED
@@ -0,0 +1 @@
+*.txt
experiments/logs/ycb/.gitignore
ADDED
@@ -0,0 +1 @@
+*.txt
experiments/scripts/eval_linemod.sh
ADDED
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -x
+set -e
+
+export PYTHONUNBUFFERED="True"
+export CUDA_VISIBLE_DEVICES=0
+
+python3 ./tools/eval_linemod.py --dataset_root ./datasets/linemod/Linemod_preprocessed\
+  --model trained_checkpoints/linemod/pose_model_9_0.01310166542980859.pth\
+  --refine_model trained_checkpoints/linemod/pose_refine_model_493_0.006761023565178073.pth
experiments/scripts/eval_ycb.sh
ADDED
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -x
+set -e
+
+export PYTHONUNBUFFERED="True"
+export CUDA_VISIBLE_DEVICES=0
+
+if [ ! -d YCB_Video_toolbox ];then
+    echo 'Downloading the YCB_Video_toolbox...'
+    git clone https://github.com/yuxng/YCB_Video_toolbox.git
+    cd YCB_Video_toolbox
+    unzip results_PoseCNN_RSS2018.zip
+    cd ..
+    cp replace_ycb_toolbox/*.m YCB_Video_toolbox/
+fi
+
+python3 ./tools/eval_ycb.py --dataset_root ./datasets/ycb/YCB_Video_Dataset\
+  --model trained_checkpoints/ycb/pose_model_26_0.012863246640872631.pth\
+  --refine_model trained_checkpoints/ycb/pose_refine_model_69_0.009449292959118935.pth
experiments/scripts/train_linemod.sh
ADDED
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -x
+set -e
+
+export PYTHONUNBUFFERED="True"
+export CUDA_VISIBLE_DEVICES=0
+
+python3 ./tools/train.py --dataset linemod\
+  --dataset_root ./datasets/linemod/Linemod_preprocessed
experiments/scripts/train_ycb.sh
ADDED
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -x
+set -e
+
+export PYTHONUNBUFFERED="True"
+export CUDA_VISIBLE_DEVICES=0
+
+python3 ./tools/train.py --dataset ycb\
+  --dataset_root ./datasets/ycb/YCB_Video_Dataset
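Both training scripts are thin wrappers: they set two environment variables and call tools/train.py with a dataset flag and a dataset root. A minimal sketch of the equivalent launch from Python, assuming only the entry point and flags that appear in the scripts above:

```python
# Equivalent of experiments/scripts/train_ycb.sh, launched from Python.
# Only the flags used by the script are passed; everything else keeps its defaults.
import os
import subprocess

os.environ["PYTHONUNBUFFERED"] = "True"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

subprocess.run(
    ["python3", "./tools/train.py",
     "--dataset", "ycb",
     "--dataset_root", "./datasets/ycb/YCB_Video_Dataset"],
    check=True,
)
```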
lib/extractors.py
ADDED
@@ -0,0 +1,145 @@
+from collections import OrderedDict
+import math
+import random
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+def load_weights_sequential(target, source_state):
+    new_dict = OrderedDict()
+    for (k1, v1), (k2, v2) in zip(target.state_dict().items(), source_state.items()):
+        new_dict[k1] = v2
+    target.load_state_dict(new_dict)
+
+def conv3x3(in_planes, out_planes, stride=1, dilation=1):
+    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
+                     padding=dilation, dilation=dilation, bias=False)
+
+class BasicBlock(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride=stride, dilation=dilation)
+        self.relu = nn.ReLU(inplace=True)
+        self.conv2 = conv3x3(planes, planes, stride=1, dilation=dilation)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class Bottleneck(nn.Module):
+    expansion = 4
+    def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, dilation=dilation,
+                               padding=dilation, bias=False)
+        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class ResNet(nn.Module):
+    def __init__(self, block, layers=(3, 4, 23, 3)):
+        self.inplanes = 64
+        super(ResNet, self).__init__()
+        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
+                               bias=False)
+        self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+        self.layer1 = self._make_layer(block, 64, layers[0])
+        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2)
+        self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4)
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+                m.weight.data.normal_(0, math.sqrt(2. / n))
+            elif isinstance(m, nn.BatchNorm2d):
+                m.weight.data.fill_(1)
+                m.bias.data.zero_()
+
+    def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            downsample = nn.Sequential(
+                nn.Conv2d(self.inplanes, planes * block.expansion,
+                          kernel_size=1, stride=stride, bias=False)
+            )
+
+        layers = [block(self.inplanes, planes, stride, downsample)]
+        self.inplanes = planes * block.expansion
+        for i in range(1, blocks):
+            layers.append(block(self.inplanes, planes, dilation=dilation))
+
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.relu(x)
+        x = self.maxpool(x)
+
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x_3 = self.layer3(x)
+        x = self.layer4(x_3)
+
+        return x, x_3
+
+
+def resnet18(pretrained=False):
+    model = ResNet(BasicBlock, [2, 2, 2, 2])
+    return model
+
+def resnet34(pretrained=False):
+    model = ResNet(BasicBlock, [3, 4, 6, 3])
+    return model
+
+def resnet50(pretrained=False):
+    model = ResNet(Bottleneck, [3, 4, 6, 3])
+    return model
+
+def resnet101(pretrained=False):
+    model = ResNet(Bottleneck, [3, 4, 23, 3])
+    return model
+
+def resnet152(pretrained=False):
+    model = ResNet(Bottleneck, [3, 8, 36, 3])
+    return model
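The extractor returns two maps: the layer4 output plus the intermediate layer3 output, both kept at 1/8 of the input resolution because layer3 and layer4 trade further striding for dilation. A minimal shape-check sketch (not part of the upload; it assumes the repository root is on PYTHONPATH and uses an arbitrary 480x640 input):

```python
# Shape check for the dilated-ResNet feature extractor defined above.
import torch
from lib.extractors import resnet18

extractor = resnet18().eval()
dummy = torch.randn(1, 3, 480, 640)   # arbitrary RGB-sized input
with torch.no_grad():
    feat, feat_mid = extractor(dummy)

# conv1 (stride 2), maxpool (stride 2) and layer2 (stride 2) give 1/8 resolution;
# layer3/layer4 keep that resolution and only enlarge the receptive field via dilation.
print(feat.shape)      # torch.Size([1, 512, 60, 80])  from layer4
print(feat_mid.shape)  # torch.Size([1, 256, 60, 80])  from layer3
```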
lib/knn/Makefile
ADDED
@@ -0,0 +1,39 @@
+# Unix commands.
+PYTHON := python3
+NVCC_COMPILE := nvcc -c -o
+RM_RF := rm -rf
+
+# Library compilation rules.
+NVCC_FLAGS := -x cu -Xcompiler -fPIC -shared
+
+# File structure.
+BUILD_DIR := build
+INCLUDE_DIRS := src
+TORCH_FFI_BUILD := build_ffi.py
+KNN_KERNEL := $(BUILD_DIR)/knn_cuda_kernel.so
+TORCH_FFI_TARGET := $(BUILD_DIR)/knn_pytorch/_knn_pytorch.so
+
+INCLUDE_FLAGS := $(foreach d, $(INCLUDE_DIRS), -I$d)
+
+DEBUG := 0
+
+# Debugging
+ifeq ($(DEBUG), 1)
+COMMON_FLAGS += -DDEBUG -g -O0
+NVCC_FLAGS += -G
+else
+COMMON_FLAGS += -DNDEBUG -O2
+endif
+
+all: $(TORCH_FFI_TARGET)
+
+$(TORCH_FFI_TARGET): $(KNN_KERNEL) $(TORCH_FFI_BUILD)
+	$(PYTHON) $(TORCH_FFI_BUILD)
+
+$(BUILD_DIR)/%.so: src/%.cu
+	@ mkdir -p $(BUILD_DIR)
+	# Separate cpp shared library that will be loaded to the extern C ffi
+	$(NVCC_COMPILE) $@ $? $(NVCC_FLAGS) $(INCLUDE_FLAGS)
+
+clean:
+	$(RM_RF) $(BUILD_DIR) $(KNN_KERNEL)
lib/knn/__init__.py
ADDED
@@ -0,0 +1,45 @@
+import unittest
+import gc
+import operator as op
+import functools
+import torch
+from torch.autograd import Variable, Function
+from lib.knn import knn_pytorch as knn_pytorch
+
+class KNearestNeighbor(Function):
+    """ Compute k nearest neighbors for each query point.
+    """
+    def __init__(self, k):
+        self.k = k
+
+    def forward(self, ref, query):
+        ref = ref.float().cuda()
+        query = query.float().cuda()
+
+        inds = torch.empty(query.shape[0], self.k, query.shape[2]).long().cuda()
+
+        knn_pytorch.knn(ref, query, inds)
+
+        return inds
+
+
+class TestKNearestNeighbor(unittest.TestCase):
+
+    def test_forward(self):
+        knn = KNearestNeighbor(2)
+        while(1):
+            D, N, M = 128, 100, 1000
+            ref = Variable(torch.rand(2, D, N))
+            query = Variable(torch.rand(2, D, M))
+
+            inds = knn(ref, query)
+            for obj in gc.get_objects():
+                if torch.is_tensor(obj):
+                    print(functools.reduce(op.mul, obj.size()) if len(obj.size()) > 0 else 0, type(obj), obj.size())
+            #ref = ref.cpu()
+            #query = query.cpu()
+            print(inds)
+
+
+if __name__ == '__main__':
+    unittest.main()
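Functionally, the compiled op fills `inds` with, for every query point, the indices of its k closest reference points; the CUDA kernels only make that brute-force search fast. A CPU-only sketch of the same lookup using stock PyTorch ops (an illustration assumed here, not code from this upload):

```python
# CPU reference for what KNearestNeighbor computes, using torch.cdist + topk.
# Layout matches the test above: ref is (B, D, N), query is (B, D, M).
import torch

def knn_cpu(ref, query, k):
    dists = torch.cdist(query.transpose(1, 2), ref.transpose(1, 2))  # (B, M, N) pairwise distances
    inds = dists.topk(k, dim=2, largest=False).indices               # k smallest per query point
    return inds.permute(0, 2, 1)                                     # (B, k, M), 0-based

ref = torch.rand(2, 128, 100)
query = torch.rand(2, 128, 1000)
print(knn_cpu(ref, query, k=2).shape)  # torch.Size([2, 2, 1000])
# Note: the CUDA kernel below appears to fill 1-based indices (p_ind[0] = 1, then l+1),
# whereas topk returns 0-based indices.
```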
lib/knn/build/knn_cuda_kernel.so
ADDED
Binary file (20.7 kB).
lib/knn/build_ffi.py
ADDED
@@ -0,0 +1,25 @@
+# https://gist.github.com/tonyseek/7821993
+import glob
+import torch
+from os import path as osp
+from torch.utils.ffi import create_extension
+
+abs_path = osp.dirname(osp.realpath(__file__))
+extra_objects = [osp.join(abs_path, 'build/knn_cuda_kernel.so')]
+extra_objects += glob.glob('/usr/local/cuda/lib64/*.a')
+
+ffi = create_extension(
+    'knn_pytorch',
+    headers=['src/knn_pytorch.h'],
+    sources=['src/knn_pytorch.c'],
+    define_macros=[('WITH_CUDA', None)],
+    relative_to=__file__,
+    with_cuda=True,
+    extra_objects=extra_objects,
+    include_dirs=[osp.join(abs_path, 'include')]
+)
+
+
+if __name__ == '__main__':
+    assert torch.cuda.is_available(), 'Please install CUDA for GPU support.'
+    ffi.build()
lib/knn/knn_pytorch/__init__.py
ADDED
@@ -0,0 +1,15 @@
+
+from torch.utils.ffi import _wrap_function
+from ._knn_pytorch import lib as _lib, ffi as _ffi
+
+__all__ = []
+def _import_symbols(locals):
+    for symbol in dir(_lib):
+        fn = getattr(_lib, symbol)
+        if callable(fn):
+            locals[symbol] = _wrap_function(fn, _ffi)
+        else:
+            locals[symbol] = fn
+        __all__.append(symbol)
+
+_import_symbols(locals())
lib/knn/knn_pytorch/__pycache__/__init__.cpython-35.pyc
ADDED
Binary file (589 Bytes).
lib/knn/knn_pytorch/__pycache__/__init__.cpython-36.pyc
ADDED
Binary file (548 Bytes).
lib/knn/knn_pytorch/_knn_pytorch.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0bb7f7f35106cf880621799eb248771672bed927fe68cd63d8742d5a91ac851f
+size 616800
lib/knn/src/knn_cuda_kernel.cu
ADDED
@@ -0,0 +1,259 @@
+/** Modified version of knn-CUDA from https://github.com/vincentfpgarcia/kNN-CUDA
+ * The modifications are
+ * removed texture memory usage
+ * removed split query KNN computation
+ * added feature extraction with bilinear interpolation
+ *
+ * Last modified by Christopher B. Choy <[email protected]> 12/23/2016
+ */
+
+// Includes
+#include <cstdio>
+#include "cuda.h"
+
+#include "knn_cuda_kernel.h"
+
+// Constants used by the program
+#define BLOCK_DIM 16
+#define DEBUG 0
+
+/**
+ * Computes the distance between two matrix A (reference points) and
+ * B (query points) containing respectively wA and wB points.
+ *
+ * @param A     pointer on the matrix A
+ * @param wA    width of the matrix A = number of points in A
+ * @param B     pointer on the matrix B
+ * @param wB    width of the matrix B = number of points in B
+ * @param dim   dimension of points = height of matrices A and B
+ * @param AB    pointer on the matrix containing the wA*wB distances computed
+ */
+__global__ void cuComputeDistanceGlobal( float* A, int wA,
+    float* B, int wB, int dim, float* AB){
+
+  // Declaration of the shared memory arrays As and Bs used to store the sub-matrix of A and B
+  __shared__ float shared_A[BLOCK_DIM][BLOCK_DIM];
+  __shared__ float shared_B[BLOCK_DIM][BLOCK_DIM];
+
+  // Sub-matrix of A (begin, step, end) and Sub-matrix of B (begin, step)
+  __shared__ int begin_A;
+  __shared__ int begin_B;
+  __shared__ int step_A;
+  __shared__ int step_B;
+  __shared__ int end_A;
+
+  // Thread index
+  int tx = threadIdx.x;
+  int ty = threadIdx.y;
+
+  // Other variables
+  float tmp;
+  float ssd = 0;
+
+  // Loop parameters
+  begin_A = BLOCK_DIM * blockIdx.y;
+  begin_B = BLOCK_DIM * blockIdx.x;
+  step_A  = BLOCK_DIM * wA;
+  step_B  = BLOCK_DIM * wB;
+  end_A   = begin_A + (dim-1) * wA;
+
+  // Conditions
+  int cond0 = (begin_A + tx < wA); // used to write in shared memory
+  int cond1 = (begin_B + tx < wB); // used to write in shared memory & to computations and to write in output matrix
+  int cond2 = (begin_A + ty < wA); // used to computations and to write in output matrix
+
+  // Loop over all the sub-matrices of A and B required to compute the block sub-matrix
+  for (int a = begin_A, b = begin_B; a <= end_A; a += step_A, b += step_B) {
+    // Load the matrices from device memory to shared memory; each thread loads one element of each matrix
+    if (a/wA + ty < dim){
+      shared_A[ty][tx] = (cond0)? A[a + wA * ty + tx] : 0;
+      shared_B[ty][tx] = (cond1)? B[b + wB * ty + tx] : 0;
+    }
+    else{
+      shared_A[ty][tx] = 0;
+      shared_B[ty][tx] = 0;
+    }
+
+    // Synchronize to make sure the matrices are loaded
+    __syncthreads();
+
+    // Compute the difference between the two matrices; each thread computes one element of the block sub-matrix
+    if (cond2 && cond1){
+      for (int k = 0; k < BLOCK_DIM; ++k){
+        tmp = shared_A[k][ty] - shared_B[k][tx];
+        ssd += tmp*tmp;
+      }
+    }
+
+    // Synchronize to make sure that the preceding computation is done before loading two new sub-matrices of A and B in the next iteration
+    __syncthreads();
+  }
+
+  // Write the block sub-matrix to device memory; each thread writes one element
+  if (cond2 && cond1)
+    AB[(begin_A + ty) * wB + begin_B + tx] = ssd;
+}
+
+
+/**
+ * Gathers k-th smallest distances for each column of the distance matrix in the top.
+ *
+ * @param dist     distance matrix
+ * @param ind      index matrix
+ * @param width    width of the distance matrix and of the index matrix
+ * @param height   height of the distance matrix and of the index matrix
+ * @param k        number of neighbors to consider
+ */
+__global__ void cuInsertionSort(float *dist, long *ind, int width, int height, int k){
+
+  // Variables
+  int l, i, j;
+  float *p_dist;
+  long *p_ind;
+  float curr_dist, max_dist;
+  long curr_row, max_row;
+  unsigned int xIndex = blockIdx.x * blockDim.x + threadIdx.x;
+
+  if (xIndex<width){
+    // Pointer shift, initialization, and max value
+    p_dist = dist + xIndex;
+    p_ind = ind + xIndex;
+    max_dist = p_dist[0];
+    p_ind[0] = 1;
+
+    // Part 1 : sort the k first elements
+    for (l=1; l<k; l++){
+      curr_row = l * width;
+      curr_dist = p_dist[curr_row];
+      if (curr_dist<max_dist){
+        i=l-1;
+        for (int a=0; a<l-1; a++){
+          if (p_dist[a*width]>curr_dist){
+            i=a;
+            break;
+          }
+        }
+        for (j=l; j>i; j--){
+          p_dist[j*width] = p_dist[(j-1)*width];
+          p_ind[j*width] = p_ind[(j-1)*width];
+        }
+        p_dist[i*width] = curr_dist;
+        p_ind[i*width] = l+1;
+      } else {
+        p_ind[l*width] = l+1;
+      }
+      max_dist = p_dist[curr_row];
+    }
+
+    // Part 2 : insert element in the k-th first lines
+    max_row = (k-1)*width;
+    for (l=k; l<height; l++){
+      curr_dist = p_dist[l*width];
+      if (curr_dist<max_dist){
+        i=k-1;
+        for (int a=0; a<k-1; a++){
+          if (p_dist[a*width]>curr_dist){
+            i=a;
+            break;
+          }
+        }
+        for (j=k-1; j>i; j--){
+          p_dist[j*width] = p_dist[(j-1)*width];
+          p_ind[j*width] = p_ind[(j-1)*width];
+        }
+        p_dist[i*width] = curr_dist;
+        p_ind[i*width] = l+1;
+        max_dist = p_dist[max_row];
+      }
+    }
+  }
+}
+
+
+/**
+ * Computes the square root of the first line (width-th first element)
+ * of the distance matrix.
+ *
+ * @param dist    distance matrix
+ * @param width   width of the distance matrix
+ * @param k       number of neighbors to consider
+ */
+__global__ void cuParallelSqrt(float *dist, int width, int k){
+  unsigned int xIndex = blockIdx.x * blockDim.x + threadIdx.x;
+  unsigned int yIndex = blockIdx.y * blockDim.y + threadIdx.y;
+  if (xIndex<width && yIndex<k)
+    dist[yIndex*width + xIndex] = sqrt(dist[yIndex*width + xIndex]);
+}
+
+
+//-----------------------------------------------------------------------------------------------//
+//                                   K-th NEAREST NEIGHBORS                                       //
+//-----------------------------------------------------------------------------------------------//
+
+/**
+ * K nearest neighbor algorithm
+ * - Initialize CUDA
+ * - Allocate device memory
+ * - Copy point sets (reference and query points) from host to device memory
+ * - Compute the distances + indexes to the k nearest neighbors for each query point
+ * - Copy distances from device to host memory
+ *
+ * @param ref_host      reference points ; pointer to linear matrix
+ * @param ref_nb        number of reference points ; width of the matrix
+ * @param query_host    query points ; pointer to linear matrix
+ * @param query_nb      number of query points ; width of the matrix
+ * @param dim           dimension of points ; height of the matrices
+ * @param k             number of neighbor to consider
+ * @param dist_host     distances to k nearest neighbors ; pointer to linear matrix
+ * @param ind_host      indexes of the k nearest neighbors ; pointer to linear matrix
+ *
+ */
+void knn_device(float* ref_dev, int ref_nb, float* query_dev, int query_nb,
+  int dim, int k, float* dist_dev, long* ind_dev, cudaStream_t stream){
+
+  // Grids and threads
+  dim3 g_16x16(query_nb/16, ref_nb/16, 1);
+  dim3 t_16x16(16, 16, 1);
+  if (query_nb%16 != 0) g_16x16.x += 1;
+  if (ref_nb  %16 != 0) g_16x16.y += 1;
+  //
+  dim3 g_256x1(query_nb/256, 1, 1);
+  dim3 t_256x1(256, 1, 1);
+  if (query_nb%256 != 0) g_256x1.x += 1;
+
+  dim3 g_k_16x16(query_nb/16, k/16, 1);
+  dim3 t_k_16x16(16, 16, 1);
+  if (query_nb%16 != 0) g_k_16x16.x += 1;
+  if (k  %16 != 0) g_k_16x16.y += 1;
+
+  // Kernel 1: Compute all the distances
+  cuComputeDistanceGlobal<<<g_16x16, t_16x16, 0, stream>>>(ref_dev, ref_nb,
+    query_dev, query_nb, dim, dist_dev);
+
+  // Kernel 2: Sort each column
+  cuInsertionSort<<<g_256x1, t_256x1, 0, stream>>>(dist_dev, ind_dev,
+    query_nb, ref_nb, k);
+
+  // Kernel 3: Compute square root of k first elements
+  // cuParallelSqrt<<<g_k_16x16,t_k_16x16, 0, stream>>>(dist_dev, query_nb, k);
+
+#if DEBUG
+  unsigned int size_of_float = sizeof(float);
+  unsigned long size_of_long = sizeof(long);
+
+  float* dist_host = new float[query_nb * k];
+  long* idx_host = new long[query_nb * k];
+
+  // Memory copy of output from device to host
+  cudaMemcpy(&dist_host[0], dist_dev,
+    query_nb * k *size_of_float, cudaMemcpyDeviceToHost);
+
+  cudaMemcpy(&idx_host[0], ind_dev,
+    query_nb * k * size_of_long, cudaMemcpyDeviceToHost);
|
253 |
+
|
254 |
+
int i = 0;
|
255 |
+
for(i = 0; i < 100; i++){
|
256 |
+
printf("IDX[%d]: %d\n", i, (int)idx_host[i]);
|
257 |
+
}
|
258 |
+
#endif
|
259 |
+
}
|
lib/knn/src/knn_cuda_kernel.h
ADDED
@@ -0,0 +1,22 @@
#ifndef _MATHUTIL_CUDA_KERNEL
#define _MATHUTIL_CUDA_KERNEL

#define IDX2D(i, j, dj) (dj * i + j)
#define IDX3D(i, j, k, dj, dk) (IDX2D(IDX2D(i, j, dj), k, dk))

#define BLOCK 512
#define MAX_STREAMS 512

#ifdef __cplusplus
extern "C" {
#endif

void knn_device(float* ref_dev, int ref_width,
    float* query_dev, int query_width,
    int height, int k, float* dist_dev, long* ind_dev, cudaStream_t stream);

#ifdef __cplusplus
}
#endif

#endif
lib/knn/src/knn_pytorch.c
ADDED
@@ -0,0 +1,48 @@
#include <THC/THC.h>
#include "knn_cuda_kernel.h"

extern THCState *state;

int knn(THCudaTensor *ref_tensor, THCudaTensor *query_tensor,
        THCudaLongTensor *idx_tensor) {

  THCAssertSameGPU(THCudaTensor_checkGPU(state, 3, idx_tensor, ref_tensor, query_tensor));
  long batch, ref_nb, query_nb, dim, k;
  THArgCheck(THCudaTensor_nDimension(state, ref_tensor) == 3 , 0, "ref_tensor: 3D Tensor expected");
  THArgCheck(THCudaTensor_nDimension(state, query_tensor) == 3 , 1, "query_tensor: 3D Tensor expected");
  THArgCheck(THCudaLongTensor_nDimension(state, idx_tensor) == 3 , 3, "idx_tensor: 3D Tensor expected");
  THArgCheck(THCudaTensor_size(state, ref_tensor, 0) == THCudaTensor_size(state, query_tensor,0), 0, "input sizes must match");
  THArgCheck(THCudaTensor_size(state, ref_tensor, 1) == THCudaTensor_size(state, query_tensor,1), 0, "input sizes must match");
  THArgCheck(THCudaTensor_size(state, idx_tensor, 2) == THCudaTensor_size(state, query_tensor,2), 0, "input sizes must match");

  //ref_tensor = THCudaTensor_newContiguous(state, ref_tensor);
  //query_tensor = THCudaTensor_newContiguous(state, query_tensor);

  batch = THCudaLongTensor_size(state, ref_tensor, 0);
  dim = THCudaTensor_size(state, ref_tensor, 1);
  k = THCudaLongTensor_size(state, idx_tensor, 1);
  ref_nb = THCudaTensor_size(state, ref_tensor, 2);
  query_nb = THCudaTensor_size(state, query_tensor, 2);

  float *ref_dev = THCudaTensor_data(state, ref_tensor);
  float *query_dev = THCudaTensor_data(state, query_tensor);
  long *idx_dev = THCudaLongTensor_data(state, idx_tensor);
  // scratch buffer for distances
  float *dist_dev = (float*)THCudaMalloc(state, ref_nb * query_nb * sizeof(float));

  for (int b = 0; b < batch; b++) {
    knn_device(ref_dev + b * dim * ref_nb, ref_nb, query_dev + b * dim * query_nb, query_nb, dim, k,
      dist_dev, idx_dev + b * k * query_nb, THCState_getCurrentStream(state));
  }
  // free buffer
  THCudaFree(state, dist_dev);
  //printf("aaaaa\n");
  // check for errors
  cudaError_t err = cudaGetLastError();
  if (err != cudaSuccess) {
    printf("error in knn: %s\n", cudaGetErrorString(err));
    THError("aborting");
  }

  return 1;
}
lib/knn/src/knn_pytorch.h
ADDED
@@ -0,0 +1,2 @@
int knn(THCudaTensor *ref_tensor, THCudaTensor *query_tensor,
        THCudaLongTensor *idx_tensor);
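For orientation, a minimal usage sketch (not part of this diff) of how the compiled extension is driven from Python. It assumes the KNearestNeighbor wrapper that lib/knn/__init__.py builds around the knn() entry point above; the calling pattern mirrors the one used in lib/loss.py, and the indices are 1-based because cuInsertionSort writes l+1.

# Hypothetical usage sketch, not part of this diff.
# Assumes the KNearestNeighbor wrapper defined in lib/knn/__init__.py.
import torch
from lib.knn.__init__ import KNearestNeighbor   # same import style as lib/loss.py

knn = KNearestNeighbor(1)                       # k = 1
ref = torch.rand(1, 3, 500).cuda()              # (batch, dim, ref_nb)
query = torch.rand(1, 3, 1000).cuda()           # (batch, dim, query_nb)
inds = knn(ref, query)                          # (batch, k, query_nb), indices are 1-based
nearest = torch.index_select(ref[0], 1, inds.view(-1) - 1)   # same pattern as in lib/loss.py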
lib/loss.py
ADDED
@@ -0,0 +1,83 @@
from torch.nn.modules.loss import _Loss
from torch.autograd import Variable
import torch
import time
import numpy as np
import torch.nn as nn
import random
import torch.backends.cudnn as cudnn
from lib.knn.__init__ import KNearestNeighbor


def loss_calculation(pred_r, pred_t, pred_c, target, model_points, idx, points, w, refine, num_point_mesh, sym_list):
    knn = KNearestNeighbor(1)
    bs, num_p, _ = pred_c.size()

    pred_r = pred_r / (torch.norm(pred_r, dim=2).view(bs, num_p, 1))

    base = torch.cat(((1.0 - 2.0*(pred_r[:, :, 2]**2 + pred_r[:, :, 3]**2)).view(bs, num_p, 1),\
                      (2.0*pred_r[:, :, 1]*pred_r[:, :, 2] - 2.0*pred_r[:, :, 0]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (2.0*pred_r[:, :, 0]*pred_r[:, :, 2] + 2.0*pred_r[:, :, 1]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (2.0*pred_r[:, :, 1]*pred_r[:, :, 2] + 2.0*pred_r[:, :, 3]*pred_r[:, :, 0]).view(bs, num_p, 1), \
                      (1.0 - 2.0*(pred_r[:, :, 1]**2 + pred_r[:, :, 3]**2)).view(bs, num_p, 1), \
                      (-2.0*pred_r[:, :, 0]*pred_r[:, :, 1] + 2.0*pred_r[:, :, 2]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (-2.0*pred_r[:, :, 0]*pred_r[:, :, 2] + 2.0*pred_r[:, :, 1]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (2.0*pred_r[:, :, 0]*pred_r[:, :, 1] + 2.0*pred_r[:, :, 2]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (1.0 - 2.0*(pred_r[:, :, 1]**2 + pred_r[:, :, 2]**2)).view(bs, num_p, 1)), dim=2).contiguous().view(bs * num_p, 3, 3)

    ori_base = base
    base = base.contiguous().transpose(2, 1).contiguous()
    model_points = model_points.view(bs, 1, num_point_mesh, 3).repeat(1, num_p, 1, 1).view(bs * num_p, num_point_mesh, 3)
    target = target.view(bs, 1, num_point_mesh, 3).repeat(1, num_p, 1, 1).view(bs * num_p, num_point_mesh, 3)
    ori_target = target
    pred_t = pred_t.contiguous().view(bs * num_p, 1, 3)
    ori_t = pred_t
    points = points.contiguous().view(bs * num_p, 1, 3)
    pred_c = pred_c.contiguous().view(bs * num_p)

    pred = torch.add(torch.bmm(model_points, base), points + pred_t)

    if not refine:
        if idx[0].item() in sym_list:
            target = target[0].transpose(1, 0).contiguous().view(3, -1)
            pred = pred.permute(2, 0, 1).contiguous().view(3, -1)
            inds = knn(target.unsqueeze(0), pred.unsqueeze(0))
            target = torch.index_select(target, 1, inds.view(-1) - 1)
            target = target.view(3, bs * num_p, num_point_mesh).permute(1, 2, 0).contiguous()
            pred = pred.view(3, bs * num_p, num_point_mesh).permute(1, 2, 0).contiguous()

    dis = torch.mean(torch.norm((pred - target), dim=2), dim=1)
    loss = torch.mean((dis * pred_c - w * torch.log(pred_c)), dim=0)


    pred_c = pred_c.view(bs, num_p)
    how_max, which_max = torch.max(pred_c, 1)
    dis = dis.view(bs, num_p)


    t = ori_t[which_max[0]] + points[which_max[0]]
    points = points.view(1, bs * num_p, 3)

    ori_base = ori_base[which_max[0]].view(1, 3, 3).contiguous()
    ori_t = t.repeat(bs * num_p, 1).contiguous().view(1, bs * num_p, 3)
    new_points = torch.bmm((points - ori_t), ori_base).contiguous()

    new_target = ori_target[0].view(1, num_point_mesh, 3).contiguous()
    ori_t = t.repeat(num_point_mesh, 1).contiguous().view(1, num_point_mesh, 3)
    new_target = torch.bmm((new_target - ori_t), ori_base).contiguous()

    # print('------------> ', dis[0][which_max[0]].item(), pred_c[0][which_max[0]].item(), idx[0].item())
    del knn
    return loss, dis[0][which_max[0]], new_points.detach(), new_target.detach()


class Loss(_Loss):

    def __init__(self, num_points_mesh, sym_list):
        super(Loss, self).__init__(True)
        self.num_pt_mesh = num_points_mesh
        self.sym_list = sym_list

    def forward(self, pred_r, pred_t, pred_c, target, model_points, idx, points, w, refine):

        return loss_calculation(pred_r, pred_t, pred_c, target, model_points, idx, points, w, refine, self.num_pt_mesh, self.sym_list)
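Editor's note on the base block above (lib/loss_refiner.py below builds the identical matrix): after the torch.norm division, pred_r holds a unit quaternion q = [w, x, y, z] = pred_r[..., 0..3], and the nine concatenated entries are the standard quaternion-to-rotation-matrix formula,

R(q) =
\begin{pmatrix}
1 - 2(y^2 + z^2) & 2(xy - wz)       & 2(xz + wy)       \\
2(xy + wz)       & 1 - 2(x^2 + z^2) & 2(yz - wx)       \\
2(xz - wy)       & 2(yz + wx)       & 1 - 2(x^2 + y^2)
\end{pmatrix}.

Because base is transposed before the bmm, torch.bmm(model_points, base) computes p R(q)^T for each row-vector point p, i.e. it rotates the model points; the translation term is then added on top.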
lib/loss_refiner.py
ADDED
@@ -0,0 +1,76 @@
from torch.nn.modules.loss import _Loss
from torch.autograd import Variable
import torch
import time
import numpy as np
import torch.nn as nn
import random
import torch.backends.cudnn as cudnn
from lib.knn.__init__ import KNearestNeighbor


def loss_calculation(pred_r, pred_t, target, model_points, idx, points, num_point_mesh, sym_list):
    knn = KNearestNeighbor(1)
    pred_r = pred_r.view(1, 1, -1)
    pred_t = pred_t.view(1, 1, -1)
    bs, num_p, _ = pred_r.size()
    num_input_points = len(points[0])

    pred_r = pred_r / (torch.norm(pred_r, dim=2).view(bs, num_p, 1))

    base = torch.cat(((1.0 - 2.0*(pred_r[:, :, 2]**2 + pred_r[:, :, 3]**2)).view(bs, num_p, 1),\
                      (2.0*pred_r[:, :, 1]*pred_r[:, :, 2] - 2.0*pred_r[:, :, 0]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (2.0*pred_r[:, :, 0]*pred_r[:, :, 2] + 2.0*pred_r[:, :, 1]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (2.0*pred_r[:, :, 1]*pred_r[:, :, 2] + 2.0*pred_r[:, :, 3]*pred_r[:, :, 0]).view(bs, num_p, 1), \
                      (1.0 - 2.0*(pred_r[:, :, 1]**2 + pred_r[:, :, 3]**2)).view(bs, num_p, 1), \
                      (-2.0*pred_r[:, :, 0]*pred_r[:, :, 1] + 2.0*pred_r[:, :, 2]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (-2.0*pred_r[:, :, 0]*pred_r[:, :, 2] + 2.0*pred_r[:, :, 1]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (2.0*pred_r[:, :, 0]*pred_r[:, :, 1] + 2.0*pred_r[:, :, 2]*pred_r[:, :, 3]).view(bs, num_p, 1), \
                      (1.0 - 2.0*(pred_r[:, :, 1]**2 + pred_r[:, :, 2]**2)).view(bs, num_p, 1)), dim=2).contiguous().view(bs * num_p, 3, 3)

    ori_base = base
    base = base.contiguous().transpose(2, 1).contiguous()
    model_points = model_points.view(bs, 1, num_point_mesh, 3).repeat(1, num_p, 1, 1).view(bs * num_p, num_point_mesh, 3)
    target = target.view(bs, 1, num_point_mesh, 3).repeat(1, num_p, 1, 1).view(bs * num_p, num_point_mesh, 3)
    ori_target = target
    pred_t = pred_t.contiguous().view(bs * num_p, 1, 3)
    ori_t = pred_t

    pred = torch.add(torch.bmm(model_points, base), pred_t)

    if idx[0].item() in sym_list:
        target = target[0].transpose(1, 0).contiguous().view(3, -1)
        pred = pred.permute(2, 0, 1).contiguous().view(3, -1)
        inds = knn(target.unsqueeze(0), pred.unsqueeze(0))
        target = torch.index_select(target, 1, inds.view(-1) - 1)
        target = target.view(3, bs * num_p, num_point_mesh).permute(1, 2, 0).contiguous()
        pred = pred.view(3, bs * num_p, num_point_mesh).permute(1, 2, 0).contiguous()

    dis = torch.mean(torch.norm((pred - target), dim=2), dim=1)

    t = ori_t[0]
    points = points.view(1, num_input_points, 3)

    ori_base = ori_base[0].view(1, 3, 3).contiguous()
    ori_t = t.repeat(bs * num_input_points, 1).contiguous().view(1, bs * num_input_points, 3)
    new_points = torch.bmm((points - ori_t), ori_base).contiguous()

    new_target = ori_target[0].view(1, num_point_mesh, 3).contiguous()
    ori_t = t.repeat(num_point_mesh, 1).contiguous().view(1, num_point_mesh, 3)
    new_target = torch.bmm((new_target - ori_t), ori_base).contiguous()

    # print('------------> ', dis.item(), idx[0].item())
    del knn
    return dis, new_points.detach(), new_target.detach()


class Loss_refine(_Loss):

    def __init__(self, num_points_mesh, sym_list):
        super(Loss_refine, self).__init__(True)
        self.num_pt_mesh = num_points_mesh
        self.sym_list = sym_list


    def forward(self, pred_r, pred_t, target, model_points, idx, points):
        return loss_calculation(pred_r, pred_t, target, model_points, idx, points, self.num_pt_mesh, self.sym_list)
lib/network.py
ADDED
@@ -0,0 +1,206 @@
import argparse
import os
import random
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.utils as vutils
from torch.autograd import Variable
from PIL import Image
import numpy as np
import pdb
import torch.nn.functional as F
from lib.pspnet import PSPNet

psp_models = {
    'resnet18': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend='resnet18'),
    'resnet34': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend='resnet34'),
    'resnet50': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet50'),
    'resnet101': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet101'),
    'resnet152': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet152')
}

class ModifiedResnet(nn.Module):

    def __init__(self, usegpu=True):
        super(ModifiedResnet, self).__init__()

        self.model = psp_models['resnet18'.lower()]()
        self.model = nn.DataParallel(self.model)

    def forward(self, x):
        x = self.model(x)
        return x

class PoseNetFeat(nn.Module):
    def __init__(self, num_points):
        super(PoseNetFeat, self).__init__()
        self.conv1 = torch.nn.Conv1d(3, 64, 1)
        self.conv2 = torch.nn.Conv1d(64, 128, 1)

        self.e_conv1 = torch.nn.Conv1d(32, 64, 1)
        self.e_conv2 = torch.nn.Conv1d(64, 128, 1)

        self.conv5 = torch.nn.Conv1d(256, 512, 1)
        self.conv6 = torch.nn.Conv1d(512, 1024, 1)

        self.ap1 = torch.nn.AvgPool1d(num_points)
        self.num_points = num_points
    def forward(self, x, emb):
        x = F.relu(self.conv1(x))
        emb = F.relu(self.e_conv1(emb))
        pointfeat_1 = torch.cat((x, emb), dim=1)

        x = F.relu(self.conv2(x))
        emb = F.relu(self.e_conv2(emb))
        pointfeat_2 = torch.cat((x, emb), dim=1)

        x = F.relu(self.conv5(pointfeat_2))
        x = F.relu(self.conv6(x))

        ap_x = self.ap1(x)

        ap_x = ap_x.view(-1, 1024, 1).repeat(1, 1, self.num_points)
        return torch.cat([pointfeat_1, pointfeat_2, ap_x], 1) #128 + 256 + 1024

class PoseNet(nn.Module):
    def __init__(self, num_points, num_obj):
        super(PoseNet, self).__init__()
        self.num_points = num_points
        self.cnn = ModifiedResnet()
        self.feat = PoseNetFeat(num_points)

        self.conv1_r = torch.nn.Conv1d(1408, 640, 1)
        self.conv1_t = torch.nn.Conv1d(1408, 640, 1)
        self.conv1_c = torch.nn.Conv1d(1408, 640, 1)

        self.conv2_r = torch.nn.Conv1d(640, 256, 1)
        self.conv2_t = torch.nn.Conv1d(640, 256, 1)
        self.conv2_c = torch.nn.Conv1d(640, 256, 1)

        self.conv3_r = torch.nn.Conv1d(256, 128, 1)
        self.conv3_t = torch.nn.Conv1d(256, 128, 1)
        self.conv3_c = torch.nn.Conv1d(256, 128, 1)

        self.conv4_r = torch.nn.Conv1d(128, num_obj*4, 1) #quaternion
        self.conv4_t = torch.nn.Conv1d(128, num_obj*3, 1) #translation
        self.conv4_c = torch.nn.Conv1d(128, num_obj*1, 1) #confidence

        self.num_obj = num_obj

    def forward(self, img, x, choose, obj):
        out_img = self.cnn(img)

        bs, di, _, _ = out_img.size()

        emb = out_img.view(bs, di, -1)
        choose = choose.repeat(1, di, 1)
        emb = torch.gather(emb, 2, choose).contiguous()

        x = x.transpose(2, 1).contiguous()
        ap_x = self.feat(x, emb)

        rx = F.relu(self.conv1_r(ap_x))
        tx = F.relu(self.conv1_t(ap_x))
        cx = F.relu(self.conv1_c(ap_x))

        rx = F.relu(self.conv2_r(rx))
        tx = F.relu(self.conv2_t(tx))
        cx = F.relu(self.conv2_c(cx))

        rx = F.relu(self.conv3_r(rx))
        tx = F.relu(self.conv3_t(tx))
        cx = F.relu(self.conv3_c(cx))

        rx = self.conv4_r(rx).view(bs, self.num_obj, 4, self.num_points)
        tx = self.conv4_t(tx).view(bs, self.num_obj, 3, self.num_points)
        cx = torch.sigmoid(self.conv4_c(cx)).view(bs, self.num_obj, 1, self.num_points)

        b = 0
        out_rx = torch.index_select(rx[b], 0, obj[b])
        out_tx = torch.index_select(tx[b], 0, obj[b])
        out_cx = torch.index_select(cx[b], 0, obj[b])

        out_rx = out_rx.contiguous().transpose(2, 1).contiguous()
        out_cx = out_cx.contiguous().transpose(2, 1).contiguous()
        out_tx = out_tx.contiguous().transpose(2, 1).contiguous()

        return out_rx, out_tx, out_cx, emb.detach()



class PoseRefineNetFeat(nn.Module):
    def __init__(self, num_points):
        super(PoseRefineNetFeat, self).__init__()
        self.conv1 = torch.nn.Conv1d(3, 64, 1)
        self.conv2 = torch.nn.Conv1d(64, 128, 1)

        self.e_conv1 = torch.nn.Conv1d(32, 64, 1)
        self.e_conv2 = torch.nn.Conv1d(64, 128, 1)

        self.conv5 = torch.nn.Conv1d(384, 512, 1)
        self.conv6 = torch.nn.Conv1d(512, 1024, 1)

        self.ap1 = torch.nn.AvgPool1d(num_points)
        self.num_points = num_points

    def forward(self, x, emb):
        x = F.relu(self.conv1(x))
        emb = F.relu(self.e_conv1(emb))
        pointfeat_1 = torch.cat([x, emb], dim=1)

        x = F.relu(self.conv2(x))
        emb = F.relu(self.e_conv2(emb))
        pointfeat_2 = torch.cat([x, emb], dim=1)

        pointfeat_3 = torch.cat([pointfeat_1, pointfeat_2], dim=1)

        x = F.relu(self.conv5(pointfeat_3))
        x = F.relu(self.conv6(x))

        ap_x = self.ap1(x)

        ap_x = ap_x.view(-1, 1024)
        return ap_x

class PoseRefineNet(nn.Module):
    def __init__(self, num_points, num_obj):
        super(PoseRefineNet, self).__init__()
        self.num_points = num_points
        self.feat = PoseRefineNetFeat(num_points)

        self.conv1_r = torch.nn.Linear(1024, 512)
        self.conv1_t = torch.nn.Linear(1024, 512)

        self.conv2_r = torch.nn.Linear(512, 128)
        self.conv2_t = torch.nn.Linear(512, 128)

        self.conv3_r = torch.nn.Linear(128, num_obj*4) #quaternion
        self.conv3_t = torch.nn.Linear(128, num_obj*3) #translation

        self.num_obj = num_obj

    def forward(self, x, emb, obj):
        bs = x.size()[0]

        x = x.transpose(2, 1).contiguous()
        ap_x = self.feat(x, emb)

        rx = F.relu(self.conv1_r(ap_x))
        tx = F.relu(self.conv1_t(ap_x))

        rx = F.relu(self.conv2_r(rx))
        tx = F.relu(self.conv2_t(tx))

        rx = self.conv3_r(rx).view(bs, self.num_obj, 4)
        tx = self.conv3_t(tx).view(bs, self.num_obj, 3)

        b = 0
        out_rx = torch.index_select(rx[b], 0, obj[b])
        out_tx = torch.index_select(tx[b], 0, obj[b])

        return out_rx, out_tx
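A quick shape sanity check (hypothetical, not part of this diff). The 1408-channel fusion comes from 128 + 256 + 1024 as noted in PoseNetFeat; num_points=1000 and num_obj=21 are assumptions here (YCB-Video has 21 classes; the point count is set by the training scripts, which are not shown on this page).

# Hypothetical smoke test, not part of this diff.
import torch
from lib.network import PoseNet

estimator = PoseNet(num_points=1000, num_obj=21).cuda()
img = torch.rand(1, 3, 120, 120).cuda()                    # RGB crop (H, W divisible by 8)
cloud = torch.rand(1, 1000, 3).cuda()                      # masked depth points
choose = torch.randint(0, 120 * 120, (1, 1, 1000)).cuda()  # flat pixel index of each chosen point
obj = torch.LongTensor([[0]]).cuda()                       # object class id
pred_r, pred_t, pred_c, emb = estimator(img, cloud, choose, obj)
print(pred_r.shape, pred_t.shape, pred_c.shape, emb.shape)
# expected: (1, 1000, 4) (1, 1000, 3) (1, 1000, 1) (1, 32, 1000)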
lib/pspnet.py
ADDED
@@ -0,0 +1,77 @@
import torch
from torch import nn
from torch.nn import functional as F
import lib.extractors as extractors


class PSPModule(nn.Module):
    def __init__(self, features, out_features=1024, sizes=(1, 2, 3, 6)):
        super(PSPModule, self).__init__()
        self.stages = []
        self.stages = nn.ModuleList([self._make_stage(features, size) for size in sizes])
        self.bottleneck = nn.Conv2d(features * (len(sizes) + 1), out_features, kernel_size=1)
        self.relu = nn.ReLU()

    def _make_stage(self, features, size):
        prior = nn.AdaptiveAvgPool2d(output_size=(size, size))
        conv = nn.Conv2d(features, features, kernel_size=1, bias=False)
        return nn.Sequential(prior, conv)

    def forward(self, feats):
        h, w = feats.size(2), feats.size(3)
        priors = [F.upsample(input=stage(feats), size=(h, w), mode='bilinear') for stage in self.stages] + [feats]
        bottle = self.bottleneck(torch.cat(priors, 1))
        return self.relu(bottle)


class PSPUpsample(nn.Module):
    def __init__(self, in_channels, out_channels):
        super(PSPUpsample, self).__init__()
        self.conv = nn.Sequential(
            nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True),
            nn.Conv2d(in_channels, out_channels, 3, padding=1),
            nn.PReLU()
        )

    def forward(self, x):
        return self.conv(x)


class PSPNet(nn.Module):
    def __init__(self, n_classes=21, sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet18',
                 pretrained=False):
        super(PSPNet, self).__init__()
        self.feats = getattr(extractors, backend)(pretrained)
        self.psp = PSPModule(psp_size, 1024, sizes)
        self.drop_1 = nn.Dropout2d(p=0.3)

        self.up_1 = PSPUpsample(1024, 256)
        self.up_2 = PSPUpsample(256, 64)
        self.up_3 = PSPUpsample(64, 64)

        self.drop_2 = nn.Dropout2d(p=0.15)
        self.final = nn.Sequential(
            nn.Conv2d(64, 32, kernel_size=1),
            nn.LogSoftmax()
        )

        self.classifier = nn.Sequential(
            nn.Linear(deep_features_size, 256),
            nn.ReLU(),
            nn.Linear(256, n_classes)
        )

    def forward(self, x):
        f, class_f = self.feats(x)
        p = self.psp(f)
        p = self.drop_1(p)

        p = self.up_1(p)
        p = self.drop_2(p)

        p = self.up_2(p)
        p = self.drop_2(p)

        p = self.up_3(p)

        return self.final(p)
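The head above ends in a 32-channel per-pixel map (Conv2d(64, 32) followed by LogSoftmax), which is the colour embedding that PoseNetFeat.e_conv1 in lib/network.py consumes. A hypothetical check (not part of this diff); it assumes the extractors backbone keeps an output stride of 8, so the three 2x upsamples should restore the input resolution.

# Hypothetical check, not part of this diff.
import torch
from lib.network import ModifiedResnet

cnn = ModifiedResnet().cuda()
out = cnn(torch.rand(1, 3, 120, 120).cuda())
print(out.shape)  # expected: torch.Size([1, 32, 120, 120]), matching Conv1d(32, 64, 1) downstream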
lib/transformations.py
ADDED
@@ -0,0 +1,1935 @@
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
# transformations.py
|
3 |
+
|
4 |
+
# Copyright (c) 2006-2018, Christoph Gohlke
|
5 |
+
# Copyright (c) 2006-2018, The Regents of the University of California
|
6 |
+
# Produced at the Laboratory for Fluorescence Dynamics
|
7 |
+
# All rights reserved.
|
8 |
+
#
|
9 |
+
# Redistribution and use in source and binary forms, with or without
|
10 |
+
# modification, are permitted provided that the following conditions are met:
|
11 |
+
#
|
12 |
+
# * Redistributions of source code must retain the above copyright
|
13 |
+
# notice, this list of conditions and the following disclaimer.
|
14 |
+
# * Redistributions in binary form must reproduce the above copyright
|
15 |
+
# notice, this list of conditions and the following disclaimer in the
|
16 |
+
# documentation and/or other materials provided with the distribution.
|
17 |
+
# * Neither the name of the copyright holders nor the names of any
|
18 |
+
# contributors may be used to endorse or promote products derived
|
19 |
+
# from this software without specific prior written permission.
|
20 |
+
#
|
21 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
22 |
+
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
23 |
+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
24 |
+
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
25 |
+
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
26 |
+
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
27 |
+
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
28 |
+
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
29 |
+
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
30 |
+
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
31 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
32 |
+
|
33 |
+
"""Homogeneous Transformation Matrices and Quaternions.
|
34 |
+
|
35 |
+
A library for calculating 4x4 matrices for translating, rotating, reflecting,
|
36 |
+
scaling, shearing, projecting, orthogonalizing, and superimposing arrays of
|
37 |
+
3D homogeneous coordinates as well as for converting between rotation matrices,
|
38 |
+
Euler angles, and quaternions. Also includes an Arcball control object and
|
39 |
+
functions to decompose transformation matrices.
|
40 |
+
|
41 |
+
:Author:
|
42 |
+
`Christoph Gohlke <https://www.lfd.uci.edu/~gohlke/>`_
|
43 |
+
|
44 |
+
:Organization:
|
45 |
+
Laboratory for Fluorescence Dynamics, University of California, Irvine
|
46 |
+
|
47 |
+
:Version: 2018.02.08
|
48 |
+
|
49 |
+
Requirements
|
50 |
+
------------
|
51 |
+
* `CPython 2.7 or 3.6 <http://www.python.org>`_
|
52 |
+
* `Numpy 1.13 <http://www.numpy.org>`_
|
53 |
+
* `Transformations.c 2018.02.08 <https://www.lfd.uci.edu/~gohlke/>`_
|
54 |
+
(recommended for speedup of some functions)
|
55 |
+
|
56 |
+
Notes
|
57 |
+
-----
|
58 |
+
The API is not stable yet and is expected to change between revisions.
|
59 |
+
|
60 |
+
This Python code is not optimized for speed. Refer to the transformations.c
|
61 |
+
module for a faster implementation of some functions.
|
62 |
+
|
63 |
+
Documentation in HTML format can be generated with epydoc.
|
64 |
+
|
65 |
+
Matrices (M) can be inverted using numpy.linalg.inv(M), be concatenated using
|
66 |
+
numpy.dot(M0, M1), or transform homogeneous coordinate arrays (v) using
|
67 |
+
numpy.dot(M, v) for shape (4, \*) column vectors, respectively
|
68 |
+
numpy.dot(v, M.T) for shape (\*, 4) row vectors ("array of points").
|
69 |
+
|
70 |
+
This module follows the "column vectors on the right" and "row major storage"
|
71 |
+
(C contiguous) conventions. The translation components are in the right column
|
72 |
+
of the transformation matrix, i.e. M[:3, 3].
|
73 |
+
The transpose of the transformation matrices may have to be used to interface
|
74 |
+
with other graphics systems, e.g. with OpenGL's glMultMatrixd(). See also [16].
|
75 |
+
|
76 |
+
Calculations are carried out with numpy.float64 precision.
|
77 |
+
|
78 |
+
Vector, point, quaternion, and matrix function arguments are expected to be
|
79 |
+
"array like", i.e. tuple, list, or numpy arrays.
|
80 |
+
|
81 |
+
Return types are numpy arrays unless specified otherwise.
|
82 |
+
|
83 |
+
Angles are in radians unless specified otherwise.
|
84 |
+
|
85 |
+
Quaternions w+ix+jy+kz are represented as [w, x, y, z].
|
86 |
+
|
87 |
+
A triple of Euler angles can be applied/interpreted in 24 ways, which can
|
88 |
+
be specified using a 4 character string or encoded 4-tuple:
|
89 |
+
|
90 |
+
*Axes 4-string*: e.g. 'sxyz' or 'ryxy'
|
91 |
+
|
92 |
+
- first character : rotations are applied to 's'tatic or 'r'otating frame
|
93 |
+
- remaining characters : successive rotation axis 'x', 'y', or 'z'
|
94 |
+
|
95 |
+
*Axes 4-tuple*: e.g. (0, 0, 0, 0) or (1, 1, 1, 1)
|
96 |
+
|
97 |
+
- inner axis: code of axis ('x':0, 'y':1, 'z':2) of rightmost matrix.
|
98 |
+
- parity : even (0) if inner axis 'x' is followed by 'y', 'y' is followed
|
99 |
+
by 'z', or 'z' is followed by 'x'. Otherwise odd (1).
|
100 |
+
- repetition : first and last axis are same (1) or different (0).
|
101 |
+
- frame : rotations are applied to static (0) or rotating (1) frame.
|
102 |
+
|
103 |
+
Other Python packages and modules for 3D transformations and quaternions:
|
104 |
+
|
105 |
+
* `Transforms3d <https://pypi.python.org/pypi/transforms3d>`_
|
106 |
+
includes most code of this module.
|
107 |
+
* `Blender.mathutils <http://www.blender.org/api/blender_python_api>`_
|
108 |
+
* `numpy-dtypes <https://github.com/numpy/numpy-dtypes>`_
|
109 |
+
|
110 |
+
References
|
111 |
+
----------
|
112 |
+
(1) Matrices and transformations. Ronald Goldman.
|
113 |
+
In "Graphics Gems I", pp 472-475. Morgan Kaufmann, 1990.
|
114 |
+
(2) More matrices and transformations: shear and pseudo-perspective.
|
115 |
+
Ronald Goldman. In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991.
|
116 |
+
(3) Decomposing a matrix into simple transformations. Spencer Thomas.
|
117 |
+
In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991.
|
118 |
+
(4) Recovering the data from the transformation matrix. Ronald Goldman.
|
119 |
+
In "Graphics Gems II", pp 324-331. Morgan Kaufmann, 1991.
|
120 |
+
(5) Euler angle conversion. Ken Shoemake.
|
121 |
+
In "Graphics Gems IV", pp 222-229. Morgan Kaufmann, 1994.
|
122 |
+
(6) Arcball rotation control. Ken Shoemake.
|
123 |
+
In "Graphics Gems IV", pp 175-192. Morgan Kaufmann, 1994.
|
124 |
+
(7) Representing attitude: Euler angles, unit quaternions, and rotation
|
125 |
+
vectors. James Diebel. 2006.
|
126 |
+
(8) A discussion of the solution for the best rotation to relate two sets
|
127 |
+
of vectors. W Kabsch. Acta Cryst. 1978. A34, 827-828.
|
128 |
+
(9) Closed-form solution of absolute orientation using unit quaternions.
|
129 |
+
BKP Horn. J Opt Soc Am A. 1987. 4(4):629-642.
|
130 |
+
(10) Quaternions. Ken Shoemake.
|
131 |
+
http://www.sfu.ca/~jwa3/cmpt461/files/quatut.pdf
|
132 |
+
(11) From quaternion to matrix and back. JMP van Waveren. 2005.
|
133 |
+
http://www.intel.com/cd/ids/developer/asmo-na/eng/293748.htm
|
134 |
+
(12) Uniform random rotations. Ken Shoemake.
|
135 |
+
In "Graphics Gems III", pp 124-132. Morgan Kaufmann, 1992.
|
136 |
+
(13) Quaternion in molecular modeling. CFF Karney.
|
137 |
+
J Mol Graph Mod, 25(5):595-604
|
138 |
+
(14) New method for extracting the quaternion from a rotation matrix.
|
139 |
+
Itzhack Y Bar-Itzhack, J Guid Contr Dynam. 2000. 23(6): 1085-1087.
|
140 |
+
(15) Multiple View Geometry in Computer Vision. Hartley and Zissermann.
|
141 |
+
Cambridge University Press; 2nd Ed. 2004. Chapter 4, Algorithm 4.7, p 130.
|
142 |
+
(16) Column Vectors vs. Row Vectors.
|
143 |
+
http://steve.hollasch.net/cgindex/math/matrix/column-vec.html
|
144 |
+
|
145 |
+
Examples
|
146 |
+
--------
|
147 |
+
>>> alpha, beta, gamma = 0.123, -1.234, 2.345
|
148 |
+
>>> origin, xaxis, yaxis, zaxis = [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]
|
149 |
+
>>> I = identity_matrix()
|
150 |
+
>>> Rx = rotation_matrix(alpha, xaxis)
|
151 |
+
>>> Ry = rotation_matrix(beta, yaxis)
|
152 |
+
>>> Rz = rotation_matrix(gamma, zaxis)
|
153 |
+
>>> R = concatenate_matrices(Rx, Ry, Rz)
|
154 |
+
>>> euler = euler_from_matrix(R, 'rxyz')
|
155 |
+
>>> numpy.allclose([alpha, beta, gamma], euler)
|
156 |
+
True
|
157 |
+
>>> Re = euler_matrix(alpha, beta, gamma, 'rxyz')
|
158 |
+
>>> is_same_transform(R, Re)
|
159 |
+
True
|
160 |
+
>>> al, be, ga = euler_from_matrix(Re, 'rxyz')
|
161 |
+
>>> is_same_transform(Re, euler_matrix(al, be, ga, 'rxyz'))
|
162 |
+
True
|
163 |
+
>>> qx = quaternion_about_axis(alpha, xaxis)
|
164 |
+
>>> qy = quaternion_about_axis(beta, yaxis)
|
165 |
+
>>> qz = quaternion_about_axis(gamma, zaxis)
|
166 |
+
>>> q = quaternion_multiply(qx, qy)
|
167 |
+
>>> q = quaternion_multiply(q, qz)
|
168 |
+
>>> Rq = quaternion_matrix(q)
|
169 |
+
>>> is_same_transform(R, Rq)
|
170 |
+
True
|
171 |
+
>>> S = scale_matrix(1.23, origin)
|
172 |
+
>>> T = translation_matrix([1, 2, 3])
|
173 |
+
>>> Z = shear_matrix(beta, xaxis, origin, zaxis)
|
174 |
+
>>> R = random_rotation_matrix(numpy.random.rand(3))
|
175 |
+
>>> M = concatenate_matrices(T, R, Z, S)
|
176 |
+
>>> scale, shear, angles, trans, persp = decompose_matrix(M)
|
177 |
+
>>> numpy.allclose(scale, 1.23)
|
178 |
+
True
|
179 |
+
>>> numpy.allclose(trans, [1, 2, 3])
|
180 |
+
True
|
181 |
+
>>> numpy.allclose(shear, [0, math.tan(beta), 0])
|
182 |
+
True
|
183 |
+
>>> is_same_transform(R, euler_matrix(axes='sxyz', *angles))
|
184 |
+
True
|
185 |
+
>>> M1 = compose_matrix(scale, shear, angles, trans, persp)
|
186 |
+
>>> is_same_transform(M, M1)
|
187 |
+
True
|
188 |
+
>>> v0, v1 = random_vector(3), random_vector(3)
|
189 |
+
>>> M = rotation_matrix(angle_between_vectors(v0, v1), vector_product(v0, v1))
|
190 |
+
>>> v2 = numpy.dot(v0, M[:3,:3].T)
|
191 |
+
>>> numpy.allclose(unit_vector(v1), unit_vector(v2))
|
192 |
+
True
|
193 |
+
|
194 |
+
"""
|
195 |
+
|
196 |
+
from __future__ import division, print_function
|
197 |
+
|
198 |
+
import math
|
199 |
+
|
200 |
+
import numpy
|
201 |
+
|
202 |
+
__version__ = '2018.02.08'
|
203 |
+
__docformat__ = 'restructuredtext en'
|
204 |
+
__all__ = ()
|
205 |
+
|
206 |
+
|
207 |
+
def identity_matrix():
|
208 |
+
"""Return 4x4 identity/unit matrix.
|
209 |
+
|
210 |
+
>>> I = identity_matrix()
|
211 |
+
>>> numpy.allclose(I, numpy.dot(I, I))
|
212 |
+
True
|
213 |
+
>>> numpy.sum(I), numpy.trace(I)
|
214 |
+
(4.0, 4.0)
|
215 |
+
>>> numpy.allclose(I, numpy.identity(4))
|
216 |
+
True
|
217 |
+
|
218 |
+
"""
|
219 |
+
return numpy.identity(4)
|
220 |
+
|
221 |
+
|
222 |
+
def translation_matrix(direction):
|
223 |
+
"""Return matrix to translate by direction vector.
|
224 |
+
|
225 |
+
>>> v = numpy.random.random(3) - 0.5
|
226 |
+
>>> numpy.allclose(v, translation_matrix(v)[:3, 3])
|
227 |
+
True
|
228 |
+
|
229 |
+
"""
|
230 |
+
M = numpy.identity(4)
|
231 |
+
M[:3, 3] = direction[:3]
|
232 |
+
return M
|
233 |
+
|
234 |
+
|
235 |
+
def translation_from_matrix(matrix):
|
236 |
+
"""Return translation vector from translation matrix.
|
237 |
+
|
238 |
+
>>> v0 = numpy.random.random(3) - 0.5
|
239 |
+
>>> v1 = translation_from_matrix(translation_matrix(v0))
|
240 |
+
>>> numpy.allclose(v0, v1)
|
241 |
+
True
|
242 |
+
|
243 |
+
"""
|
244 |
+
return numpy.array(matrix, copy=False)[:3, 3].copy()
|
245 |
+
|
246 |
+
|
247 |
+
def reflection_matrix(point, normal):
|
248 |
+
"""Return matrix to mirror at plane defined by point and normal vector.
|
249 |
+
|
250 |
+
>>> v0 = numpy.random.random(4) - 0.5
|
251 |
+
>>> v0[3] = 1.
|
252 |
+
>>> v1 = numpy.random.random(3) - 0.5
|
253 |
+
>>> R = reflection_matrix(v0, v1)
|
254 |
+
>>> numpy.allclose(2, numpy.trace(R))
|
255 |
+
True
|
256 |
+
>>> numpy.allclose(v0, numpy.dot(R, v0))
|
257 |
+
True
|
258 |
+
>>> v2 = v0.copy()
|
259 |
+
>>> v2[:3] += v1
|
260 |
+
>>> v3 = v0.copy()
|
261 |
+
>>> v2[:3] -= v1
|
262 |
+
>>> numpy.allclose(v2, numpy.dot(R, v3))
|
263 |
+
True
|
264 |
+
|
265 |
+
"""
|
266 |
+
normal = unit_vector(normal[:3])
|
267 |
+
M = numpy.identity(4)
|
268 |
+
M[:3, :3] -= 2.0 * numpy.outer(normal, normal)
|
269 |
+
M[:3, 3] = (2.0 * numpy.dot(point[:3], normal)) * normal
|
270 |
+
return M
|
271 |
+
|
272 |
+
|
273 |
+
def reflection_from_matrix(matrix):
|
274 |
+
"""Return mirror plane point and normal vector from reflection matrix.
|
275 |
+
|
276 |
+
>>> v0 = numpy.random.random(3) - 0.5
|
277 |
+
>>> v1 = numpy.random.random(3) - 0.5
|
278 |
+
>>> M0 = reflection_matrix(v0, v1)
|
279 |
+
>>> point, normal = reflection_from_matrix(M0)
|
280 |
+
>>> M1 = reflection_matrix(point, normal)
|
281 |
+
>>> is_same_transform(M0, M1)
|
282 |
+
True
|
283 |
+
|
284 |
+
"""
|
285 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=False)
|
286 |
+
# normal: unit eigenvector corresponding to eigenvalue -1
|
287 |
+
w, V = numpy.linalg.eig(M[:3, :3])
|
288 |
+
i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0]
|
289 |
+
if not len(i):
|
290 |
+
raise ValueError('no unit eigenvector corresponding to eigenvalue -1')
|
291 |
+
normal = numpy.real(V[:, i[0]]).squeeze()
|
292 |
+
# point: any unit eigenvector corresponding to eigenvalue 1
|
293 |
+
w, V = numpy.linalg.eig(M)
|
294 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
|
295 |
+
if not len(i):
|
296 |
+
raise ValueError('no unit eigenvector corresponding to eigenvalue 1')
|
297 |
+
point = numpy.real(V[:, i[-1]]).squeeze()
|
298 |
+
point /= point[3]
|
299 |
+
return point, normal
|
300 |
+
|
301 |
+
|
302 |
+
def rotation_matrix(angle, direction, point=None):
|
303 |
+
"""Return matrix to rotate about axis defined by point and direction.
|
304 |
+
|
305 |
+
>>> R = rotation_matrix(math.pi/2, [0, 0, 1], [1, 0, 0])
|
306 |
+
>>> numpy.allclose(numpy.dot(R, [0, 0, 0, 1]), [1, -1, 0, 1])
|
307 |
+
True
|
308 |
+
>>> angle = (random.random() - 0.5) * (2*math.pi)
|
309 |
+
>>> direc = numpy.random.random(3) - 0.5
|
310 |
+
>>> point = numpy.random.random(3) - 0.5
|
311 |
+
>>> R0 = rotation_matrix(angle, direc, point)
|
312 |
+
>>> R1 = rotation_matrix(angle-2*math.pi, direc, point)
|
313 |
+
>>> is_same_transform(R0, R1)
|
314 |
+
True
|
315 |
+
>>> R0 = rotation_matrix(angle, direc, point)
|
316 |
+
>>> R1 = rotation_matrix(-angle, -direc, point)
|
317 |
+
>>> is_same_transform(R0, R1)
|
318 |
+
True
|
319 |
+
>>> I = numpy.identity(4, numpy.float64)
|
320 |
+
>>> numpy.allclose(I, rotation_matrix(math.pi*2, direc))
|
321 |
+
True
|
322 |
+
>>> numpy.allclose(2, numpy.trace(rotation_matrix(math.pi/2,
|
323 |
+
... direc, point)))
|
324 |
+
True
|
325 |
+
|
326 |
+
"""
|
327 |
+
sina = math.sin(angle)
|
328 |
+
cosa = math.cos(angle)
|
329 |
+
direction = unit_vector(direction[:3])
|
330 |
+
# rotation matrix around unit vector
|
331 |
+
R = numpy.diag([cosa, cosa, cosa])
|
332 |
+
R += numpy.outer(direction, direction) * (1.0 - cosa)
|
333 |
+
direction *= sina
|
334 |
+
R += numpy.array([[ 0.0, -direction[2], direction[1]],
|
335 |
+
[ direction[2], 0.0, -direction[0]],
|
336 |
+
[-direction[1], direction[0], 0.0]])
|
337 |
+
M = numpy.identity(4)
|
338 |
+
M[:3, :3] = R
|
339 |
+
if point is not None:
|
340 |
+
# rotation not around origin
|
341 |
+
point = numpy.array(point[:3], dtype=numpy.float64, copy=False)
|
342 |
+
M[:3, 3] = point - numpy.dot(R, point)
|
343 |
+
return M
|
344 |
+
|
345 |
+
|
346 |
+
def rotation_from_matrix(matrix):
|
347 |
+
"""Return rotation angle and axis from rotation matrix.
|
348 |
+
|
349 |
+
>>> angle = (random.random() - 0.5) * (2*math.pi)
|
350 |
+
>>> direc = numpy.random.random(3) - 0.5
|
351 |
+
>>> point = numpy.random.random(3) - 0.5
|
352 |
+
>>> R0 = rotation_matrix(angle, direc, point)
|
353 |
+
>>> angle, direc, point = rotation_from_matrix(R0)
|
354 |
+
>>> R1 = rotation_matrix(angle, direc, point)
|
355 |
+
>>> is_same_transform(R0, R1)
|
356 |
+
True
|
357 |
+
|
358 |
+
"""
|
359 |
+
R = numpy.array(matrix, dtype=numpy.float64, copy=False)
|
360 |
+
R33 = R[:3, :3]
|
361 |
+
# direction: unit eigenvector of R33 corresponding to eigenvalue of 1
|
362 |
+
w, W = numpy.linalg.eig(R33.T)
|
363 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
|
364 |
+
if not len(i):
|
365 |
+
raise ValueError('no unit eigenvector corresponding to eigenvalue 1')
|
366 |
+
direction = numpy.real(W[:, i[-1]]).squeeze()
|
367 |
+
# point: unit eigenvector of R33 corresponding to eigenvalue of 1
|
368 |
+
w, Q = numpy.linalg.eig(R)
|
369 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
|
370 |
+
if not len(i):
|
371 |
+
raise ValueError('no unit eigenvector corresponding to eigenvalue 1')
|
372 |
+
point = numpy.real(Q[:, i[-1]]).squeeze()
|
373 |
+
point /= point[3]
|
374 |
+
# rotation angle depending on direction
|
375 |
+
cosa = (numpy.trace(R33) - 1.0) / 2.0
|
376 |
+
if abs(direction[2]) > 1e-8:
|
377 |
+
sina = (R[1, 0] + (cosa-1.0)*direction[0]*direction[1]) / direction[2]
|
378 |
+
elif abs(direction[1]) > 1e-8:
|
379 |
+
sina = (R[0, 2] + (cosa-1.0)*direction[0]*direction[2]) / direction[1]
|
380 |
+
else:
|
381 |
+
sina = (R[2, 1] + (cosa-1.0)*direction[1]*direction[2]) / direction[0]
|
382 |
+
angle = math.atan2(sina, cosa)
|
383 |
+
return angle, direction, point
|
384 |
+
|
385 |
+
|
386 |
+
def scale_matrix(factor, origin=None, direction=None):
|
387 |
+
"""Return matrix to scale by factor around origin in direction.
|
388 |
+
|
389 |
+
Use factor -1 for point symmetry.
|
390 |
+
|
391 |
+
>>> v = (numpy.random.rand(4, 5) - 0.5) * 20
|
392 |
+
>>> v[3] = 1
|
393 |
+
>>> S = scale_matrix(-1.234)
|
394 |
+
>>> numpy.allclose(numpy.dot(S, v)[:3], -1.234*v[:3])
|
395 |
+
True
|
396 |
+
>>> factor = random.random() * 10 - 5
|
397 |
+
>>> origin = numpy.random.random(3) - 0.5
|
398 |
+
>>> direct = numpy.random.random(3) - 0.5
|
399 |
+
>>> S = scale_matrix(factor, origin)
|
400 |
+
>>> S = scale_matrix(factor, origin, direct)
|
401 |
+
|
402 |
+
"""
|
403 |
+
if direction is None:
|
404 |
+
# uniform scaling
|
405 |
+
M = numpy.diag([factor, factor, factor, 1.0])
|
406 |
+
if origin is not None:
|
407 |
+
M[:3, 3] = origin[:3]
|
408 |
+
M[:3, 3] *= 1.0 - factor
|
409 |
+
else:
|
410 |
+
# nonuniform scaling
|
411 |
+
direction = unit_vector(direction[:3])
|
412 |
+
factor = 1.0 - factor
|
413 |
+
M = numpy.identity(4)
|
414 |
+
M[:3, :3] -= factor * numpy.outer(direction, direction)
|
415 |
+
if origin is not None:
|
416 |
+
M[:3, 3] = (factor * numpy.dot(origin[:3], direction)) * direction
|
417 |
+
return M
|
418 |
+
|
419 |
+
|
420 |
+
def scale_from_matrix(matrix):
|
421 |
+
"""Return scaling factor, origin and direction from scaling matrix.
|
422 |
+
|
423 |
+
>>> factor = random.random() * 10 - 5
|
424 |
+
>>> origin = numpy.random.random(3) - 0.5
|
425 |
+
>>> direct = numpy.random.random(3) - 0.5
|
426 |
+
>>> S0 = scale_matrix(factor, origin)
|
427 |
+
>>> factor, origin, direction = scale_from_matrix(S0)
|
428 |
+
>>> S1 = scale_matrix(factor, origin, direction)
|
429 |
+
>>> is_same_transform(S0, S1)
|
430 |
+
True
|
431 |
+
>>> S0 = scale_matrix(factor, origin, direct)
|
432 |
+
>>> factor, origin, direction = scale_from_matrix(S0)
|
433 |
+
>>> S1 = scale_matrix(factor, origin, direction)
|
434 |
+
>>> is_same_transform(S0, S1)
|
435 |
+
True
|
436 |
+
|
437 |
+
"""
|
438 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=False)
|
439 |
+
M33 = M[:3, :3]
|
440 |
+
factor = numpy.trace(M33) - 2.0
|
441 |
+
try:
|
442 |
+
# direction: unit eigenvector corresponding to eigenvalue factor
|
443 |
+
w, V = numpy.linalg.eig(M33)
|
444 |
+
i = numpy.where(abs(numpy.real(w) - factor) < 1e-8)[0][0]
|
445 |
+
direction = numpy.real(V[:, i]).squeeze()
|
446 |
+
direction /= vector_norm(direction)
|
447 |
+
except IndexError:
|
448 |
+
# uniform scaling
|
449 |
+
factor = (factor + 2.0) / 3.0
|
450 |
+
direction = None
|
451 |
+
# origin: any eigenvector corresponding to eigenvalue 1
|
452 |
+
w, V = numpy.linalg.eig(M)
|
453 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
|
454 |
+
if not len(i):
|
455 |
+
raise ValueError('no eigenvector corresponding to eigenvalue 1')
|
456 |
+
origin = numpy.real(V[:, i[-1]]).squeeze()
|
457 |
+
origin /= origin[3]
|
458 |
+
return factor, origin, direction
|
459 |
+
|
460 |
+
|
461 |
+
def projection_matrix(point, normal, direction=None,
|
462 |
+
perspective=None, pseudo=False):
|
463 |
+
"""Return matrix to project onto plane defined by point and normal.
|
464 |
+
|
465 |
+
Using either perspective point, projection direction, or none of both.
|
466 |
+
|
467 |
+
If pseudo is True, perspective projections will preserve relative depth
|
468 |
+
such that Perspective = dot(Orthogonal, PseudoPerspective).
|
469 |
+
|
470 |
+
>>> P = projection_matrix([0, 0, 0], [1, 0, 0])
|
471 |
+
>>> numpy.allclose(P[1:, 1:], numpy.identity(4)[1:, 1:])
|
472 |
+
True
|
473 |
+
>>> point = numpy.random.random(3) - 0.5
|
474 |
+
>>> normal = numpy.random.random(3) - 0.5
|
475 |
+
>>> direct = numpy.random.random(3) - 0.5
|
476 |
+
>>> persp = numpy.random.random(3) - 0.5
|
477 |
+
>>> P0 = projection_matrix(point, normal)
|
478 |
+
>>> P1 = projection_matrix(point, normal, direction=direct)
|
479 |
+
>>> P2 = projection_matrix(point, normal, perspective=persp)
|
480 |
+
>>> P3 = projection_matrix(point, normal, perspective=persp, pseudo=True)
|
481 |
+
>>> is_same_transform(P2, numpy.dot(P0, P3))
|
482 |
+
True
|
483 |
+
>>> P = projection_matrix([3, 0, 0], [1, 1, 0], [1, 0, 0])
|
484 |
+
>>> v0 = (numpy.random.rand(4, 5) - 0.5) * 20
|
485 |
+
>>> v0[3] = 1
|
486 |
+
>>> v1 = numpy.dot(P, v0)
|
487 |
+
>>> numpy.allclose(v1[1], v0[1])
|
488 |
+
True
|
489 |
+
>>> numpy.allclose(v1[0], 3-v1[1])
|
490 |
+
True
|
491 |
+
|
492 |
+
"""
|
493 |
+
M = numpy.identity(4)
|
494 |
+
point = numpy.array(point[:3], dtype=numpy.float64, copy=False)
|
495 |
+
normal = unit_vector(normal[:3])
|
496 |
+
if perspective is not None:
|
497 |
+
# perspective projection
|
498 |
+
perspective = numpy.array(perspective[:3], dtype=numpy.float64,
|
499 |
+
copy=False)
|
500 |
+
M[0, 0] = M[1, 1] = M[2, 2] = numpy.dot(perspective-point, normal)
|
501 |
+
M[:3, :3] -= numpy.outer(perspective, normal)
|
502 |
+
if pseudo:
|
503 |
+
# preserve relative depth
|
504 |
+
M[:3, :3] -= numpy.outer(normal, normal)
|
505 |
+
M[:3, 3] = numpy.dot(point, normal) * (perspective+normal)
|
506 |
+
else:
|
507 |
+
M[:3, 3] = numpy.dot(point, normal) * perspective
|
508 |
+
M[3, :3] = -normal
|
509 |
+
M[3, 3] = numpy.dot(perspective, normal)
|
510 |
+
elif direction is not None:
|
511 |
+
# parallel projection
|
512 |
+
direction = numpy.array(direction[:3], dtype=numpy.float64, copy=False)
|
513 |
+
scale = numpy.dot(direction, normal)
|
514 |
+
M[:3, :3] -= numpy.outer(direction, normal) / scale
|
515 |
+
M[:3, 3] = direction * (numpy.dot(point, normal) / scale)
|
516 |
+
else:
|
517 |
+
# orthogonal projection
|
518 |
+
M[:3, :3] -= numpy.outer(normal, normal)
|
519 |
+
M[:3, 3] = numpy.dot(point, normal) * normal
|
520 |
+
return M
|
521 |
+
|
522 |
+
|
523 |
+
def projection_from_matrix(matrix, pseudo=False):
|
524 |
+
"""Return projection plane and perspective point from projection matrix.
|
525 |
+
|
526 |
+
Return values are the same as the arguments for the projection_matrix function:
|
527 |
+
point, normal, direction, perspective, and pseudo.
|
528 |
+
|
529 |
+
>>> point = numpy.random.random(3) - 0.5
|
530 |
+
>>> normal = numpy.random.random(3) - 0.5
|
531 |
+
>>> direct = numpy.random.random(3) - 0.5
|
532 |
+
>>> persp = numpy.random.random(3) - 0.5
|
533 |
+
>>> P0 = projection_matrix(point, normal)
|
534 |
+
>>> result = projection_from_matrix(P0)
|
535 |
+
>>> P1 = projection_matrix(*result)
|
536 |
+
>>> is_same_transform(P0, P1)
|
537 |
+
True
|
538 |
+
>>> P0 = projection_matrix(point, normal, direct)
|
539 |
+
>>> result = projection_from_matrix(P0)
|
540 |
+
>>> P1 = projection_matrix(*result)
|
541 |
+
>>> is_same_transform(P0, P1)
|
542 |
+
True
|
543 |
+
>>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=False)
|
544 |
+
>>> result = projection_from_matrix(P0, pseudo=False)
|
545 |
+
>>> P1 = projection_matrix(*result)
|
546 |
+
>>> is_same_transform(P0, P1)
|
547 |
+
True
|
548 |
+
>>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=True)
|
549 |
+
>>> result = projection_from_matrix(P0, pseudo=True)
|
550 |
+
>>> P1 = projection_matrix(*result)
|
551 |
+
>>> is_same_transform(P0, P1)
|
552 |
+
True
|
553 |
+
|
554 |
+
"""
|
555 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=False)
|
556 |
+
M33 = M[:3, :3]
|
557 |
+
w, V = numpy.linalg.eig(M)
|
558 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
|
559 |
+
if not pseudo and len(i):
|
560 |
+
# point: any eigenvector corresponding to eigenvalue 1
|
561 |
+
point = numpy.real(V[:, i[-1]]).squeeze()
|
562 |
+
point /= point[3]
|
563 |
+
# direction: unit eigenvector corresponding to eigenvalue 0
|
564 |
+
w, V = numpy.linalg.eig(M33)
|
565 |
+
i = numpy.where(abs(numpy.real(w)) < 1e-8)[0]
|
566 |
+
if not len(i):
|
567 |
+
raise ValueError('no eigenvector corresponding to eigenvalue 0')
|
568 |
+
direction = numpy.real(V[:, i[0]]).squeeze()
|
569 |
+
direction /= vector_norm(direction)
|
570 |
+
# normal: unit eigenvector of M33.T corresponding to eigenvalue 0
|
571 |
+
w, V = numpy.linalg.eig(M33.T)
|
572 |
+
i = numpy.where(abs(numpy.real(w)) < 1e-8)[0]
|
573 |
+
if len(i):
|
574 |
+
# parallel projection
|
575 |
+
normal = numpy.real(V[:, i[0]]).squeeze()
|
576 |
+
normal /= vector_norm(normal)
|
577 |
+
return point, normal, direction, None, False
|
578 |
+
else:
|
579 |
+
# orthogonal projection, where normal equals direction vector
|
580 |
+
return point, direction, None, None, False
|
581 |
+
else:
|
582 |
+
# perspective projection
|
583 |
+
i = numpy.where(abs(numpy.real(w)) > 1e-8)[0]
|
584 |
+
if not len(i):
|
585 |
+
raise ValueError(
|
586 |
+
'no eigenvector not corresponding to eigenvalue 0')
|
587 |
+
point = numpy.real(V[:, i[-1]]).squeeze()
|
588 |
+
point /= point[3]
|
589 |
+
normal = - M[3, :3]
|
590 |
+
perspective = M[:3, 3] / numpy.dot(point[:3], normal)
|
591 |
+
if pseudo:
|
592 |
+
perspective -= normal
|
593 |
+
return point, normal, None, perspective, pseudo
|
594 |
+
|
595 |
+
|
596 |
+
def clip_matrix(left, right, bottom, top, near, far, perspective=False):
|
597 |
+
"""Return matrix to obtain normalized device coordinates from frustum.
|
598 |
+
|
599 |
+
The frustum bounds are axis-aligned along x (left, right),
|
600 |
+
y (bottom, top) and z (near, far).
|
601 |
+
|
602 |
+
Normalized device coordinates are in range [-1, 1] if coordinates are
|
603 |
+
inside the frustum.
|
604 |
+
|
605 |
+
If perspective is True the frustum is a truncated pyramid with the
|
606 |
+
perspective point at origin and direction along z axis, otherwise an
|
607 |
+
orthographic canonical view volume (a box).
|
608 |
+
|
609 |
+
Homogeneous coordinates transformed by the perspective clip matrix
|
610 |
+
need to be dehomogenized (divided by w coordinate).
|
611 |
+
|
612 |
+
>>> frustum = numpy.random.rand(6)
|
613 |
+
>>> frustum[1] += frustum[0]
|
614 |
+
>>> frustum[3] += frustum[2]
|
615 |
+
>>> frustum[5] += frustum[4]
|
616 |
+
>>> M = clip_matrix(perspective=False, *frustum)
|
617 |
+
>>> numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1])
|
618 |
+
array([-1., -1., -1., 1.])
|
619 |
+
>>> numpy.dot(M, [frustum[1], frustum[3], frustum[5], 1])
|
620 |
+
array([ 1., 1., 1., 1.])
|
621 |
+
>>> M = clip_matrix(perspective=True, *frustum)
|
622 |
+
>>> v = numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1])
|
623 |
+
>>> v / v[3]
|
624 |
+
array([-1., -1., -1., 1.])
|
625 |
+
>>> v = numpy.dot(M, [frustum[1], frustum[3], frustum[4], 1])
|
626 |
+
>>> v / v[3]
|
627 |
+
array([ 1., 1., -1., 1.])
|
628 |
+
|
629 |
+
"""
|
630 |
+
if left >= right or bottom >= top or near >= far:
|
631 |
+
raise ValueError('invalid frustum')
|
632 |
+
if perspective:
|
633 |
+
if near <= _EPS:
|
634 |
+
raise ValueError('invalid frustum: near <= 0')
|
635 |
+
t = 2.0 * near
|
636 |
+
M = [[t/(left-right), 0.0, (right+left)/(right-left), 0.0],
|
637 |
+
[0.0, t/(bottom-top), (top+bottom)/(top-bottom), 0.0],
|
638 |
+
[0.0, 0.0, (far+near)/(near-far), t*far/(far-near)],
|
639 |
+
[0.0, 0.0, -1.0, 0.0]]
|
640 |
+
else:
|
641 |
+
M = [[2.0/(right-left), 0.0, 0.0, (right+left)/(left-right)],
|
642 |
+
[0.0, 2.0/(top-bottom), 0.0, (top+bottom)/(bottom-top)],
|
643 |
+
[0.0, 0.0, 2.0/(far-near), (far+near)/(near-far)],
|
644 |
+
[0.0, 0.0, 0.0, 1.0]]
|
645 |
+
return numpy.array(M)
|
646 |
+
|
647 |
+
|
648 |
+
def shear_matrix(angle, direction, point, normal):
|
649 |
+
"""Return matrix to shear by angle along direction vector on shear plane.
|
650 |
+
|
651 |
+
The shear plane is defined by a point and normal vector. The direction
|
652 |
+
vector must be orthogonal to the plane's normal vector.
|
653 |
+
|
654 |
+
A point P is transformed by the shear matrix into P" such that
|
655 |
+
the vector P-P" is parallel to the direction vector and its extent is
|
656 |
+
given by the angle of P-P'-P", where P' is the orthogonal projection
|
657 |
+
of P onto the shear plane.
|
658 |
+
|
659 |
+
>>> angle = (random.random() - 0.5) * 4*math.pi
|
660 |
+
>>> direct = numpy.random.random(3) - 0.5
|
661 |
+
>>> point = numpy.random.random(3) - 0.5
|
662 |
+
>>> normal = numpy.cross(direct, numpy.random.random(3))
|
663 |
+
>>> S = shear_matrix(angle, direct, point, normal)
|
664 |
+
>>> numpy.allclose(1, numpy.linalg.det(S))
|
665 |
+
True
|
666 |
+
|
667 |
+
"""
|
668 |
+
normal = unit_vector(normal[:3])
|
669 |
+
direction = unit_vector(direction[:3])
|
670 |
+
if abs(numpy.dot(normal, direction)) > 1e-6:
|
671 |
+
raise ValueError('direction and normal vectors are not orthogonal')
|
672 |
+
angle = math.tan(angle)
|
673 |
+
M = numpy.identity(4)
|
674 |
+
M[:3, :3] += angle * numpy.outer(direction, normal)
|
675 |
+
M[:3, 3] = -angle * numpy.dot(point[:3], normal) * direction
|
676 |
+
return M
|
677 |
+
|
678 |
+
|
679 |
+
def shear_from_matrix(matrix):
|
680 |
+
"""Return shear angle, direction and plane from shear matrix.
|
681 |
+
|
682 |
+
>>> angle = (random.random() - 0.5) * 4*math.pi
|
683 |
+
>>> direct = numpy.random.random(3) - 0.5
|
684 |
+
>>> point = numpy.random.random(3) - 0.5
|
685 |
+
>>> normal = numpy.cross(direct, numpy.random.random(3))
|
686 |
+
>>> S0 = shear_matrix(angle, direct, point, normal)
|
687 |
+
>>> angle, direct, point, normal = shear_from_matrix(S0)
|
688 |
+
>>> S1 = shear_matrix(angle, direct, point, normal)
|
689 |
+
>>> is_same_transform(S0, S1)
|
690 |
+
True
|
691 |
+
|
692 |
+
"""
|
693 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=False)
|
694 |
+
M33 = M[:3, :3]
|
695 |
+
# normal: cross product of independent eigenvectors corresponding to eigenvalue 1
|
696 |
+
w, V = numpy.linalg.eig(M33)
|
697 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-4)[0]
|
698 |
+
if len(i) < 2:
|
699 |
+
raise ValueError('no two linear independent eigenvectors found %s' % w)
|
700 |
+
V = numpy.real(V[:, i]).squeeze().T
|
701 |
+
lenorm = -1.0
|
702 |
+
for i0, i1 in ((0, 1), (0, 2), (1, 2)):
|
703 |
+
n = numpy.cross(V[i0], V[i1])
|
704 |
+
w = vector_norm(n)
|
705 |
+
if w > lenorm:
|
706 |
+
lenorm = w
|
707 |
+
normal = n
|
708 |
+
normal /= lenorm
|
709 |
+
# direction and angle
|
710 |
+
direction = numpy.dot(M33 - numpy.identity(3), normal)
|
711 |
+
angle = vector_norm(direction)
|
712 |
+
direction /= angle
|
713 |
+
angle = math.atan(angle)
|
714 |
+
# point: eigenvector corresponding to eigenvalue 1
|
715 |
+
w, V = numpy.linalg.eig(M)
|
716 |
+
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
|
717 |
+
if not len(i):
|
718 |
+
raise ValueError('no eigenvector corresponding to eigenvalue 1')
|
719 |
+
point = numpy.real(V[:, i[-1]]).squeeze()
|
720 |
+
point /= point[3]
|
721 |
+
return angle, direction, point, normal
|
722 |
+
|
723 |
+
|
724 |
+
def decompose_matrix(matrix):
|
725 |
+
"""Return sequence of transformations from transformation matrix.
|
726 |
+
|
727 |
+
matrix : array_like
|
728 |
+
Non-degenerate homogeneous transformation matrix
|
729 |
+
|
730 |
+
Return tuple of:
|
731 |
+
scale : vector of 3 scaling factors
|
732 |
+
shear : list of shear factors for x-y, x-z, y-z axes
|
733 |
+
angles : list of Euler angles about static x, y, z axes
|
734 |
+
translate : translation vector along x, y, z axes
|
735 |
+
perspective : perspective partition of matrix
|
736 |
+
|
737 |
+
Raise ValueError if matrix is of wrong type or degenerate.
|
738 |
+
|
739 |
+
>>> T0 = translation_matrix([1, 2, 3])
|
740 |
+
>>> scale, shear, angles, trans, persp = decompose_matrix(T0)
|
741 |
+
>>> T1 = translation_matrix(trans)
|
742 |
+
>>> numpy.allclose(T0, T1)
|
743 |
+
True
|
744 |
+
>>> S = scale_matrix(0.123)
|
745 |
+
>>> scale, shear, angles, trans, persp = decompose_matrix(S)
|
746 |
+
>>> scale[0]
|
747 |
+
0.123
|
748 |
+
>>> R0 = euler_matrix(1, 2, 3)
|
749 |
+
>>> scale, shear, angles, trans, persp = decompose_matrix(R0)
|
750 |
+
>>> R1 = euler_matrix(*angles)
|
751 |
+
>>> numpy.allclose(R0, R1)
|
752 |
+
True
|
753 |
+
|
754 |
+
"""
|
755 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=True).T
|
756 |
+
if abs(M[3, 3]) < _EPS:
|
757 |
+
raise ValueError('M[3, 3] is zero')
|
758 |
+
M /= M[3, 3]
|
759 |
+
P = M.copy()
|
760 |
+
P[:, 3] = 0.0, 0.0, 0.0, 1.0
|
761 |
+
if not numpy.linalg.det(P):
|
762 |
+
raise ValueError('matrix is singular')
|
763 |
+
|
764 |
+
scale = numpy.zeros((3, ))
|
765 |
+
shear = [0.0, 0.0, 0.0]
|
766 |
+
angles = [0.0, 0.0, 0.0]
|
767 |
+
|
768 |
+
if any(abs(M[:3, 3]) > _EPS):
|
769 |
+
perspective = numpy.dot(M[:, 3], numpy.linalg.inv(P.T))
|
770 |
+
M[:, 3] = 0.0, 0.0, 0.0, 1.0
|
771 |
+
else:
|
772 |
+
perspective = numpy.array([0.0, 0.0, 0.0, 1.0])
|
773 |
+
|
774 |
+
translate = M[3, :3].copy()
|
775 |
+
M[3, :3] = 0.0
|
776 |
+
|
777 |
+
row = M[:3, :3].copy()
|
778 |
+
scale[0] = vector_norm(row[0])
|
779 |
+
row[0] /= scale[0]
|
780 |
+
shear[0] = numpy.dot(row[0], row[1])
|
781 |
+
row[1] -= row[0] * shear[0]
|
782 |
+
scale[1] = vector_norm(row[1])
|
783 |
+
row[1] /= scale[1]
|
784 |
+
shear[0] /= scale[1]
|
785 |
+
shear[1] = numpy.dot(row[0], row[2])
|
786 |
+
row[2] -= row[0] * shear[1]
|
787 |
+
shear[2] = numpy.dot(row[1], row[2])
|
788 |
+
row[2] -= row[1] * shear[2]
|
789 |
+
scale[2] = vector_norm(row[2])
|
790 |
+
row[2] /= scale[2]
|
791 |
+
shear[1:] /= scale[2]
|
792 |
+
|
793 |
+
if numpy.dot(row[0], numpy.cross(row[1], row[2])) < 0:
|
794 |
+
numpy.negative(scale, scale)
|
795 |
+
numpy.negative(row, row)
|
796 |
+
|
797 |
+
angles[1] = math.asin(-row[0, 2])
|
798 |
+
if math.cos(angles[1]):
|
799 |
+
angles[0] = math.atan2(row[1, 2], row[2, 2])
|
800 |
+
angles[2] = math.atan2(row[0, 1], row[0, 0])
|
801 |
+
else:
|
802 |
+
# angles[0] = math.atan2(row[1, 0], row[1, 1])
|
803 |
+
angles[0] = math.atan2(-row[2, 1], row[1, 1])
|
804 |
+
angles[2] = 0.0
|
805 |
+
|
806 |
+
return scale, shear, angles, translate, perspective
|
807 |
+
|
808 |
+
|
809 |
+
def compose_matrix(scale=None, shear=None, angles=None, translate=None,
|
810 |
+
perspective=None):
|
811 |
+
"""Return transformation matrix from sequence of transformations.
|
812 |
+
|
813 |
+
This is the inverse of the decompose_matrix function.
|
814 |
+
|
815 |
+
Sequence of transformations:
|
816 |
+
scale : vector of 3 scaling factors
|
817 |
+
shear : list of shear factors for x-y, x-z, y-z axes
|
818 |
+
angles : list of Euler angles about static x, y, z axes
|
819 |
+
translate : translation vector along x, y, z axes
|
820 |
+
perspective : perspective partition of matrix
|
821 |
+
|
822 |
+
>>> scale = numpy.random.random(3) - 0.5
|
823 |
+
>>> shear = numpy.random.random(3) - 0.5
|
824 |
+
>>> angles = (numpy.random.random(3) - 0.5) * (2*math.pi)
|
825 |
+
>>> trans = numpy.random.random(3) - 0.5
|
826 |
+
>>> persp = numpy.random.random(4) - 0.5
|
827 |
+
>>> M0 = compose_matrix(scale, shear, angles, trans, persp)
|
828 |
+
>>> result = decompose_matrix(M0)
|
829 |
+
>>> M1 = compose_matrix(*result)
|
830 |
+
>>> is_same_transform(M0, M1)
|
831 |
+
True
|
832 |
+
|
833 |
+
"""
|
834 |
+
M = numpy.identity(4)
|
835 |
+
if perspective is not None:
|
836 |
+
P = numpy.identity(4)
|
837 |
+
P[3, :] = perspective[:4]
|
838 |
+
M = numpy.dot(M, P)
|
839 |
+
if translate is not None:
|
840 |
+
T = numpy.identity(4)
|
841 |
+
T[:3, 3] = translate[:3]
|
842 |
+
M = numpy.dot(M, T)
|
843 |
+
if angles is not None:
|
844 |
+
R = euler_matrix(angles[0], angles[1], angles[2], 'sxyz')
|
845 |
+
M = numpy.dot(M, R)
|
846 |
+
if shear is not None:
|
847 |
+
Z = numpy.identity(4)
|
848 |
+
Z[1, 2] = shear[2]
|
849 |
+
Z[0, 2] = shear[1]
|
850 |
+
Z[0, 1] = shear[0]
|
851 |
+
M = numpy.dot(M, Z)
|
852 |
+
if scale is not None:
|
853 |
+
S = numpy.identity(4)
|
854 |
+
S[0, 0] = scale[0]
|
855 |
+
S[1, 1] = scale[1]
|
856 |
+
S[2, 2] = scale[2]
|
857 |
+
M = numpy.dot(M, S)
|
858 |
+
M /= M[3, 3]
|
859 |
+
return M
|
860 |
+
|
861 |
+
|
862 |
+
def orthogonalization_matrix(lengths, angles):
|
863 |
+
"""Return orthogonalization matrix for crystallographic cell coordinates.
|
864 |
+
|
865 |
+
Angles are expected in degrees.
|
866 |
+
|
867 |
+
The de-orthogonalization matrix is the inverse.
|
868 |
+
|
869 |
+
>>> O = orthogonalization_matrix([10, 10, 10], [90, 90, 90])
|
870 |
+
>>> numpy.allclose(O[:3, :3], numpy.identity(3, float) * 10)
|
871 |
+
True
|
872 |
+
>>> O = orthogonalization_matrix([9.8, 12.0, 15.5], [87.2, 80.7, 69.7])
|
873 |
+
>>> numpy.allclose(numpy.sum(O), 43.063229)
|
874 |
+
True
|
875 |
+
|
876 |
+
"""
|
877 |
+
a, b, c = lengths
|
878 |
+
angles = numpy.radians(angles)
|
879 |
+
sina, sinb, _ = numpy.sin(angles)
|
880 |
+
cosa, cosb, cosg = numpy.cos(angles)
|
881 |
+
co = (cosa * cosb - cosg) / (sina * sinb)
|
882 |
+
return numpy.array([
|
883 |
+
[ a*sinb*math.sqrt(1.0-co*co), 0.0, 0.0, 0.0],
|
884 |
+
[-a*sinb*co, b*sina, 0.0, 0.0],
|
885 |
+
[ a*cosb, b*cosa, c, 0.0],
|
886 |
+
[ 0.0, 0.0, 0.0, 1.0]])
|
887 |
+
|
888 |
+
|
889 |
+
def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True):
|
890 |
+
"""Return affine transform matrix to register two point sets.
|
891 |
+
|
892 |
+
v0 and v1 are shape (ndims, \*) arrays of at least ndims non-homogeneous
|
893 |
+
coordinates, where ndims is the dimensionality of the coordinate space.
|
894 |
+
|
895 |
+
If shear is False, a similarity transformation matrix is returned.
|
896 |
+
If scale is also False, a rigid/Euclidean transformation matrix
|
897 |
+
is returned.
|
898 |
+
|
899 |
+
By default the algorithm by Hartley and Zisserman [15] is used.
|
900 |
+
If usesvd is True, similarity and Euclidean transformation matrices
|
901 |
+
are calculated by minimizing the weighted sum of squared deviations
|
902 |
+
(RMSD) according to the algorithm by Kabsch [8].
|
903 |
+
Otherwise, and if ndims is 3, the quaternion-based algorithm by Horn [9]
|
904 |
+
is used, which is slower when using this Python implementation.
|
905 |
+
|
906 |
+
The returned matrix performs rotation, translation and uniform scaling
|
907 |
+
(if specified).
|
908 |
+
|
909 |
+
>>> v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
|
910 |
+
>>> v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
|
911 |
+
>>> affine_matrix_from_points(v0, v1)
|
912 |
+
array([[ 0.14549, 0.00062, 675.50008],
|
913 |
+
[ 0.00048, 0.14094, 53.24971],
|
914 |
+
[ 0. , 0. , 1. ]])
|
915 |
+
>>> T = translation_matrix(numpy.random.random(3)-0.5)
|
916 |
+
>>> R = random_rotation_matrix(numpy.random.random(3))
|
917 |
+
>>> S = scale_matrix(random.random())
|
918 |
+
>>> M = concatenate_matrices(T, R, S)
|
919 |
+
>>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
|
920 |
+
>>> v0[3] = 1
|
921 |
+
>>> v1 = numpy.dot(M, v0)
|
922 |
+
>>> v0[:3] += numpy.random.normal(0, 1e-8, 300).reshape(3, -1)
|
923 |
+
>>> M = affine_matrix_from_points(v0[:3], v1[:3])
|
924 |
+
>>> numpy.allclose(v1, numpy.dot(M, v0))
|
925 |
+
True
|
926 |
+
|
927 |
+
More examples in superimposition_matrix()
|
928 |
+
|
929 |
+
"""
|
930 |
+
v0 = numpy.array(v0, dtype=numpy.float64, copy=True)
|
931 |
+
v1 = numpy.array(v1, dtype=numpy.float64, copy=True)
|
932 |
+
|
933 |
+
ndims = v0.shape[0]
|
934 |
+
if ndims < 2 or v0.shape[1] < ndims or v0.shape != v1.shape:
|
935 |
+
raise ValueError('input arrays are of wrong shape or type')
|
936 |
+
|
937 |
+
# move centroids to origin
|
938 |
+
t0 = -numpy.mean(v0, axis=1)
|
939 |
+
M0 = numpy.identity(ndims+1)
|
940 |
+
M0[:ndims, ndims] = t0
|
941 |
+
v0 += t0.reshape(ndims, 1)
|
942 |
+
t1 = -numpy.mean(v1, axis=1)
|
943 |
+
M1 = numpy.identity(ndims+1)
|
944 |
+
M1[:ndims, ndims] = t1
|
945 |
+
v1 += t1.reshape(ndims, 1)
|
946 |
+
|
947 |
+
if shear:
|
948 |
+
# Affine transformation
|
949 |
+
A = numpy.concatenate((v0, v1), axis=0)
|
950 |
+
u, s, vh = numpy.linalg.svd(A.T)
|
951 |
+
vh = vh[:ndims].T
|
952 |
+
B = vh[:ndims]
|
953 |
+
C = vh[ndims:2*ndims]
|
954 |
+
t = numpy.dot(C, numpy.linalg.pinv(B))
|
955 |
+
t = numpy.concatenate((t, numpy.zeros((ndims, 1))), axis=1)
|
956 |
+
M = numpy.vstack((t, ((0.0,)*ndims) + (1.0,)))
|
957 |
+
elif usesvd or ndims != 3:
|
958 |
+
# Rigid transformation via SVD of covariance matrix
|
959 |
+
u, s, vh = numpy.linalg.svd(numpy.dot(v1, v0.T))
|
960 |
+
# rotation matrix from SVD orthonormal bases
|
961 |
+
R = numpy.dot(u, vh)
|
962 |
+
if numpy.linalg.det(R) < 0.0:
|
963 |
+
# R does not constitute right handed system
|
964 |
+
R -= numpy.outer(u[:, ndims-1], vh[ndims-1, :]*2.0)
|
965 |
+
s[-1] *= -1.0
|
966 |
+
# homogeneous transformation matrix
|
967 |
+
M = numpy.identity(ndims+1)
|
968 |
+
M[:ndims, :ndims] = R
|
969 |
+
else:
|
970 |
+
# Rigid transformation matrix via quaternion
|
971 |
+
# compute symmetric matrix N
|
972 |
+
xx, yy, zz = numpy.sum(v0 * v1, axis=1)
|
973 |
+
xy, yz, zx = numpy.sum(v0 * numpy.roll(v1, -1, axis=0), axis=1)
|
974 |
+
xz, yx, zy = numpy.sum(v0 * numpy.roll(v1, -2, axis=0), axis=1)
|
975 |
+
N = [[xx+yy+zz, 0.0, 0.0, 0.0],
|
976 |
+
[yz-zy, xx-yy-zz, 0.0, 0.0],
|
977 |
+
[zx-xz, xy+yx, yy-xx-zz, 0.0],
|
978 |
+
[xy-yx, zx+xz, yz+zy, zz-xx-yy]]
|
979 |
+
# quaternion: eigenvector corresponding to most positive eigenvalue
|
980 |
+
w, V = numpy.linalg.eigh(N)
|
981 |
+
q = V[:, numpy.argmax(w)]
|
982 |
+
q /= vector_norm(q) # unit quaternion
|
983 |
+
# homogeneous transformation matrix
|
984 |
+
M = quaternion_matrix(q)
|
985 |
+
|
986 |
+
if scale and not shear:
|
987 |
+
# Affine transformation; scale is ratio of RMS deviations from centroid
|
988 |
+
v0 *= v0
|
989 |
+
v1 *= v1
|
990 |
+
M[:ndims, :ndims] *= math.sqrt(numpy.sum(v1) / numpy.sum(v0))
|
991 |
+
|
992 |
+
# move centroids back
|
993 |
+
M = numpy.dot(numpy.linalg.inv(M1), numpy.dot(M, M0))
|
994 |
+
M /= M[ndims, ndims]
|
995 |
+
return M
|
996 |
+
|
997 |
+
|
998 |
+
def superimposition_matrix(v0, v1, scale=False, usesvd=True):
|
999 |
+
"""Return matrix to transform given 3D point set into second point set.
|
1000 |
+
|
1001 |
+
v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points.
|
1002 |
+
|
1003 |
+
The parameters scale and usesvd are explained in the more general
|
1004 |
+
affine_matrix_from_points function.
|
1005 |
+
|
1006 |
+
The returned matrix is a similarity or Euclidean transformation matrix.
|
1007 |
+
This function has a fast C implementation in transformations.c.
|
1008 |
+
|
1009 |
+
>>> v0 = numpy.random.rand(3, 10)
|
1010 |
+
>>> M = superimposition_matrix(v0, v0)
|
1011 |
+
>>> numpy.allclose(M, numpy.identity(4))
|
1012 |
+
True
|
1013 |
+
>>> R = random_rotation_matrix(numpy.random.random(3))
|
1014 |
+
>>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]]
|
1015 |
+
>>> v1 = numpy.dot(R, v0)
|
1016 |
+
>>> M = superimposition_matrix(v0, v1)
|
1017 |
+
>>> numpy.allclose(v1, numpy.dot(M, v0))
|
1018 |
+
True
|
1019 |
+
>>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
|
1020 |
+
>>> v0[3] = 1
|
1021 |
+
>>> v1 = numpy.dot(R, v0)
|
1022 |
+
>>> M = superimposition_matrix(v0, v1)
|
1023 |
+
>>> numpy.allclose(v1, numpy.dot(M, v0))
|
1024 |
+
True
|
1025 |
+
>>> S = scale_matrix(random.random())
|
1026 |
+
>>> T = translation_matrix(numpy.random.random(3)-0.5)
|
1027 |
+
>>> M = concatenate_matrices(T, R, S)
|
1028 |
+
>>> v1 = numpy.dot(M, v0)
|
1029 |
+
>>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1)
|
1030 |
+
>>> M = superimposition_matrix(v0, v1, scale=True)
|
1031 |
+
>>> numpy.allclose(v1, numpy.dot(M, v0))
|
1032 |
+
True
|
1033 |
+
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
|
1034 |
+
>>> numpy.allclose(v1, numpy.dot(M, v0))
|
1035 |
+
True
|
1036 |
+
>>> v = numpy.empty((4, 100, 3))
|
1037 |
+
>>> v[:, :, 0] = v0
|
1038 |
+
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
|
1039 |
+
>>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0]))
|
1040 |
+
True
|
1041 |
+
|
1042 |
+
"""
|
1043 |
+
v0 = numpy.array(v0, dtype=numpy.float64, copy=False)[:3]
|
1044 |
+
v1 = numpy.array(v1, dtype=numpy.float64, copy=False)[:3]
|
1045 |
+
return affine_matrix_from_points(v0, v1, shear=False,
|
1046 |
+
scale=scale, usesvd=usesvd)
|
1047 |
+
|
1048 |
+
|
1049 |
+
def euler_matrix(ai, aj, ak, axes='sxyz'):
|
1050 |
+
"""Return homogeneous rotation matrix from Euler angles and axis sequence.
|
1051 |
+
|
1052 |
+
ai, aj, ak : Euler's roll, pitch and yaw angles
|
1053 |
+
axes : One of 24 axis sequences as string or encoded tuple
|
1054 |
+
|
1055 |
+
>>> R = euler_matrix(1, 2, 3, 'syxz')
|
1056 |
+
>>> numpy.allclose(numpy.sum(R[0]), -1.34786452)
|
1057 |
+
True
|
1058 |
+
>>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
|
1059 |
+
>>> numpy.allclose(numpy.sum(R[0]), -0.383436184)
|
1060 |
+
True
|
1061 |
+
>>> ai, aj, ak = (4*math.pi) * (numpy.random.random(3) - 0.5)
|
1062 |
+
>>> for axes in _AXES2TUPLE.keys():
|
1063 |
+
... R = euler_matrix(ai, aj, ak, axes)
|
1064 |
+
>>> for axes in _TUPLE2AXES.keys():
|
1065 |
+
... R = euler_matrix(ai, aj, ak, axes)
|
1066 |
+
|
1067 |
+
"""
|
1068 |
+
try:
|
1069 |
+
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes]
|
1070 |
+
except (AttributeError, KeyError):
|
1071 |
+
_TUPLE2AXES[axes] # validation
|
1072 |
+
firstaxis, parity, repetition, frame = axes
|
1073 |
+
|
1074 |
+
i = firstaxis
|
1075 |
+
j = _NEXT_AXIS[i+parity]
|
1076 |
+
k = _NEXT_AXIS[i-parity+1]
|
1077 |
+
|
1078 |
+
if frame:
|
1079 |
+
ai, ak = ak, ai
|
1080 |
+
if parity:
|
1081 |
+
ai, aj, ak = -ai, -aj, -ak
|
1082 |
+
|
1083 |
+
si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak)
|
1084 |
+
ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak)
|
1085 |
+
cc, cs = ci*ck, ci*sk
|
1086 |
+
sc, ss = si*ck, si*sk
|
1087 |
+
|
1088 |
+
M = numpy.identity(4)
|
1089 |
+
if repetition:
|
1090 |
+
M[i, i] = cj
|
1091 |
+
M[i, j] = sj*si
|
1092 |
+
M[i, k] = sj*ci
|
1093 |
+
M[j, i] = sj*sk
|
1094 |
+
M[j, j] = -cj*ss+cc
|
1095 |
+
M[j, k] = -cj*cs-sc
|
1096 |
+
M[k, i] = -sj*ck
|
1097 |
+
M[k, j] = cj*sc+cs
|
1098 |
+
M[k, k] = cj*cc-ss
|
1099 |
+
else:
|
1100 |
+
M[i, i] = cj*ck
|
1101 |
+
M[i, j] = sj*sc-cs
|
1102 |
+
M[i, k] = sj*cc+ss
|
1103 |
+
M[j, i] = cj*sk
|
1104 |
+
M[j, j] = sj*ss+cc
|
1105 |
+
M[j, k] = sj*cs-sc
|
1106 |
+
M[k, i] = -sj
|
1107 |
+
M[k, j] = cj*si
|
1108 |
+
M[k, k] = cj*ci
|
1109 |
+
return M
|
1110 |
+
|
1111 |
+
|
1112 |
+
def euler_from_matrix(matrix, axes='sxyz'):
|
1113 |
+
"""Return Euler angles from rotation matrix for specified axis sequence.
|
1114 |
+
|
1115 |
+
axes : One of 24 axis sequences as string or encoded tuple
|
1116 |
+
|
1117 |
+
Note that many Euler angle triplets can describe one matrix.
|
1118 |
+
|
1119 |
+
>>> R0 = euler_matrix(1, 2, 3, 'syxz')
|
1120 |
+
>>> al, be, ga = euler_from_matrix(R0, 'syxz')
|
1121 |
+
>>> R1 = euler_matrix(al, be, ga, 'syxz')
|
1122 |
+
>>> numpy.allclose(R0, R1)
|
1123 |
+
True
|
1124 |
+
>>> angles = (4*math.pi) * (numpy.random.random(3) - 0.5)
|
1125 |
+
>>> for axes in _AXES2TUPLE.keys():
|
1126 |
+
... R0 = euler_matrix(axes=axes, *angles)
|
1127 |
+
... R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes))
|
1128 |
+
... if not numpy.allclose(R0, R1): print(axes, "failed")
|
1129 |
+
|
1130 |
+
"""
|
1131 |
+
try:
|
1132 |
+
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
|
1133 |
+
except (AttributeError, KeyError):
|
1134 |
+
_TUPLE2AXES[axes] # validation
|
1135 |
+
firstaxis, parity, repetition, frame = axes
|
1136 |
+
|
1137 |
+
i = firstaxis
|
1138 |
+
j = _NEXT_AXIS[i+parity]
|
1139 |
+
k = _NEXT_AXIS[i-parity+1]
|
1140 |
+
|
1141 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3]
|
1142 |
+
if repetition:
|
1143 |
+
sy = math.sqrt(M[i, j]*M[i, j] + M[i, k]*M[i, k])
|
1144 |
+
if sy > _EPS:
|
1145 |
+
ax = math.atan2( M[i, j], M[i, k])
|
1146 |
+
ay = math.atan2( sy, M[i, i])
|
1147 |
+
az = math.atan2( M[j, i], -M[k, i])
|
1148 |
+
else:
|
1149 |
+
ax = math.atan2(-M[j, k], M[j, j])
|
1150 |
+
ay = math.atan2( sy, M[i, i])
|
1151 |
+
az = 0.0
|
1152 |
+
else:
|
1153 |
+
cy = math.sqrt(M[i, i]*M[i, i] + M[j, i]*M[j, i])
|
1154 |
+
if cy > _EPS:
|
1155 |
+
ax = math.atan2( M[k, j], M[k, k])
|
1156 |
+
ay = math.atan2(-M[k, i], cy)
|
1157 |
+
az = math.atan2( M[j, i], M[i, i])
|
1158 |
+
else:
|
1159 |
+
ax = math.atan2(-M[j, k], M[j, j])
|
1160 |
+
ay = math.atan2(-M[k, i], cy)
|
1161 |
+
az = 0.0
|
1162 |
+
|
1163 |
+
if parity:
|
1164 |
+
ax, ay, az = -ax, -ay, -az
|
1165 |
+
if frame:
|
1166 |
+
ax, az = az, ax
|
1167 |
+
return ax, ay, az
|
1168 |
+
|
1169 |
+
|
1170 |
+
def euler_from_quaternion(quaternion, axes='sxyz'):
|
1171 |
+
"""Return Euler angles from quaternion for specified axis sequence.
|
1172 |
+
|
1173 |
+
>>> angles = euler_from_quaternion([0.99810947, 0.06146124, 0, 0])
|
1174 |
+
>>> numpy.allclose(angles, [0.123, 0, 0])
|
1175 |
+
True
|
1176 |
+
|
1177 |
+
"""
|
1178 |
+
return euler_from_matrix(quaternion_matrix(quaternion), axes)
|
1179 |
+
|
1180 |
+
|
1181 |
+
def quaternion_from_euler(ai, aj, ak, axes='sxyz'):
|
1182 |
+
"""Return quaternion from Euler angles and axis sequence.
|
1183 |
+
|
1184 |
+
ai, aj, ak : Euler's roll, pitch and yaw angles
|
1185 |
+
axes : One of 24 axis sequences as string or encoded tuple
|
1186 |
+
|
1187 |
+
>>> q = quaternion_from_euler(1, 2, 3, 'ryxz')
|
1188 |
+
>>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435])
|
1189 |
+
True
|
1190 |
+
|
1191 |
+
"""
|
1192 |
+
try:
|
1193 |
+
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
|
1194 |
+
except (AttributeError, KeyError):
|
1195 |
+
_TUPLE2AXES[axes] # validation
|
1196 |
+
firstaxis, parity, repetition, frame = axes
|
1197 |
+
|
1198 |
+
i = firstaxis + 1
|
1199 |
+
j = _NEXT_AXIS[i+parity-1] + 1
|
1200 |
+
k = _NEXT_AXIS[i-parity] + 1
|
1201 |
+
|
1202 |
+
if frame:
|
1203 |
+
ai, ak = ak, ai
|
1204 |
+
if parity:
|
1205 |
+
aj = -aj
|
1206 |
+
|
1207 |
+
ai /= 2.0
|
1208 |
+
aj /= 2.0
|
1209 |
+
ak /= 2.0
|
1210 |
+
ci = math.cos(ai)
|
1211 |
+
si = math.sin(ai)
|
1212 |
+
cj = math.cos(aj)
|
1213 |
+
sj = math.sin(aj)
|
1214 |
+
ck = math.cos(ak)
|
1215 |
+
sk = math.sin(ak)
|
1216 |
+
cc = ci*ck
|
1217 |
+
cs = ci*sk
|
1218 |
+
sc = si*ck
|
1219 |
+
ss = si*sk
|
1220 |
+
|
1221 |
+
q = numpy.empty((4, ))
|
1222 |
+
if repetition:
|
1223 |
+
q[0] = cj*(cc - ss)
|
1224 |
+
q[i] = cj*(cs + sc)
|
1225 |
+
q[j] = sj*(cc + ss)
|
1226 |
+
q[k] = sj*(cs - sc)
|
1227 |
+
else:
|
1228 |
+
q[0] = cj*cc + sj*ss
|
1229 |
+
q[i] = cj*sc - sj*cs
|
1230 |
+
q[j] = cj*ss + sj*cc
|
1231 |
+
q[k] = cj*cs - sj*sc
|
1232 |
+
if parity:
|
1233 |
+
q[j] *= -1.0
|
1234 |
+
|
1235 |
+
return q
|
1236 |
+
|
1237 |
+
|
1238 |
+
def quaternion_about_axis(angle, axis):
|
1239 |
+
"""Return quaternion for rotation about axis.
|
1240 |
+
|
1241 |
+
>>> q = quaternion_about_axis(0.123, [1, 0, 0])
|
1242 |
+
>>> numpy.allclose(q, [0.99810947, 0.06146124, 0, 0])
|
1243 |
+
True
|
1244 |
+
|
1245 |
+
"""
|
1246 |
+
q = numpy.array([0.0, axis[0], axis[1], axis[2]])
|
1247 |
+
qlen = vector_norm(q)
|
1248 |
+
if qlen > _EPS:
|
1249 |
+
q *= math.sin(angle/2.0) / qlen
|
1250 |
+
q[0] = math.cos(angle/2.0)
|
1251 |
+
return q
|
1252 |
+
|
1253 |
+
|
1254 |
+
def quaternion_matrix(quaternion):
|
1255 |
+
"""Return homogeneous rotation matrix from quaternion.
|
1256 |
+
|
1257 |
+
>>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0])
|
1258 |
+
>>> numpy.allclose(M, rotation_matrix(0.123, [1, 0, 0]))
|
1259 |
+
True
|
1260 |
+
>>> M = quaternion_matrix([1, 0, 0, 0])
|
1261 |
+
>>> numpy.allclose(M, numpy.identity(4))
|
1262 |
+
True
|
1263 |
+
>>> M = quaternion_matrix([0, 1, 0, 0])
|
1264 |
+
>>> numpy.allclose(M, numpy.diag([1, -1, -1, 1]))
|
1265 |
+
True
|
1266 |
+
|
1267 |
+
"""
|
1268 |
+
q = numpy.array(quaternion, dtype=numpy.float64, copy=True)
|
1269 |
+
n = numpy.dot(q, q)
|
1270 |
+
if n < _EPS:
|
1271 |
+
return numpy.identity(4)
|
1272 |
+
q *= math.sqrt(2.0 / n)
|
1273 |
+
q = numpy.outer(q, q)
|
1274 |
+
return numpy.array([
|
1275 |
+
[1.0-q[2, 2]-q[3, 3], q[1, 2]-q[3, 0], q[1, 3]+q[2, 0], 0.0],
|
1276 |
+
[ q[1, 2]+q[3, 0], 1.0-q[1, 1]-q[3, 3], q[2, 3]-q[1, 0], 0.0],
|
1277 |
+
[ q[1, 3]-q[2, 0], q[2, 3]+q[1, 0], 1.0-q[1, 1]-q[2, 2], 0.0],
|
1278 |
+
[ 0.0, 0.0, 0.0, 1.0]])
|
1279 |
+
|
1280 |
+
|
1281 |
+
def quaternion_from_matrix(matrix, isprecise=False):
|
1282 |
+
"""Return quaternion from rotation matrix.
|
1283 |
+
|
1284 |
+
If isprecise is True, the input matrix is assumed to be a precise rotation
|
1285 |
+
matrix and a faster algorithm is used.
|
1286 |
+
|
1287 |
+
>>> q = quaternion_from_matrix(numpy.identity(4), True)
|
1288 |
+
>>> numpy.allclose(q, [1, 0, 0, 0])
|
1289 |
+
True
|
1290 |
+
>>> q = quaternion_from_matrix(numpy.diag([1, -1, -1, 1]))
|
1291 |
+
>>> numpy.allclose(q, [0, 1, 0, 0]) or numpy.allclose(q, [0, -1, 0, 0])
|
1292 |
+
True
|
1293 |
+
>>> R = rotation_matrix(0.123, (1, 2, 3))
|
1294 |
+
>>> q = quaternion_from_matrix(R, True)
|
1295 |
+
>>> numpy.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786])
|
1296 |
+
True
|
1297 |
+
>>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0],
|
1298 |
+
... [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]]
|
1299 |
+
>>> q = quaternion_from_matrix(R)
|
1300 |
+
>>> numpy.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611])
|
1301 |
+
True
|
1302 |
+
>>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0],
|
1303 |
+
... [-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]]
|
1304 |
+
>>> q = quaternion_from_matrix(R)
|
1305 |
+
>>> numpy.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603])
|
1306 |
+
True
|
1307 |
+
>>> R = random_rotation_matrix()
|
1308 |
+
>>> q = quaternion_from_matrix(R)
|
1309 |
+
>>> is_same_transform(R, quaternion_matrix(q))
|
1310 |
+
True
|
1311 |
+
>>> is_same_quaternion(quaternion_from_matrix(R, isprecise=False),
|
1312 |
+
... quaternion_from_matrix(R, isprecise=True))
|
1313 |
+
True
|
1314 |
+
>>> R = euler_matrix(0.0, 0.0, numpy.pi/2.0)
|
1315 |
+
>>> is_same_quaternion(quaternion_from_matrix(R, isprecise=False),
|
1316 |
+
... quaternion_from_matrix(R, isprecise=True))
|
1317 |
+
True
|
1318 |
+
|
1319 |
+
"""
|
1320 |
+
M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:4, :4]
|
1321 |
+
if isprecise:
|
1322 |
+
q = numpy.empty((4, ))
|
1323 |
+
t = numpy.trace(M)
|
1324 |
+
if t > M[3, 3]:
|
1325 |
+
q[0] = t
|
1326 |
+
q[3] = M[1, 0] - M[0, 1]
|
1327 |
+
q[2] = M[0, 2] - M[2, 0]
|
1328 |
+
q[1] = M[2, 1] - M[1, 2]
|
1329 |
+
else:
|
1330 |
+
i, j, k = 0, 1, 2
|
1331 |
+
if M[1, 1] > M[0, 0]:
|
1332 |
+
i, j, k = 1, 2, 0
|
1333 |
+
if M[2, 2] > M[i, i]:
|
1334 |
+
i, j, k = 2, 0, 1
|
1335 |
+
t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3]
|
1336 |
+
q[i] = t
|
1337 |
+
q[j] = M[i, j] + M[j, i]
|
1338 |
+
q[k] = M[k, i] + M[i, k]
|
1339 |
+
q[3] = M[k, j] - M[j, k]
|
1340 |
+
q = q[[3, 0, 1, 2]]
|
1341 |
+
q *= 0.5 / math.sqrt(t * M[3, 3])
|
1342 |
+
else:
|
1343 |
+
m00 = M[0, 0]
|
1344 |
+
m01 = M[0, 1]
|
1345 |
+
m02 = M[0, 2]
|
1346 |
+
m10 = M[1, 0]
|
1347 |
+
m11 = M[1, 1]
|
1348 |
+
m12 = M[1, 2]
|
1349 |
+
m20 = M[2, 0]
|
1350 |
+
m21 = M[2, 1]
|
1351 |
+
m22 = M[2, 2]
|
1352 |
+
# symmetric matrix K
|
1353 |
+
K = numpy.array([[m00-m11-m22, 0.0, 0.0, 0.0],
|
1354 |
+
[m01+m10, m11-m00-m22, 0.0, 0.0],
|
1355 |
+
[m02+m20, m12+m21, m22-m00-m11, 0.0],
|
1356 |
+
[m21-m12, m02-m20, m10-m01, m00+m11+m22]])
|
1357 |
+
K /= 3.0
|
1358 |
+
# quaternion is eigenvector of K that corresponds to largest eigenvalue
|
1359 |
+
w, V = numpy.linalg.eigh(K)
|
1360 |
+
q = V[[3, 0, 1, 2], numpy.argmax(w)]
|
1361 |
+
if q[0] < 0.0:
|
1362 |
+
numpy.negative(q, q)
|
1363 |
+
return q
|
1364 |
+
|
1365 |
+
|
1366 |
+
def quaternion_multiply(quaternion1, quaternion0):
|
1367 |
+
"""Return multiplication of two quaternions.
|
1368 |
+
|
1369 |
+
>>> q = quaternion_multiply([4, 1, -2, 3], [8, -5, 6, 7])
|
1370 |
+
>>> numpy.allclose(q, [28, -44, -14, 48])
|
1371 |
+
True
|
1372 |
+
|
1373 |
+
"""
|
1374 |
+
w0, x0, y0, z0 = quaternion0
|
1375 |
+
w1, x1, y1, z1 = quaternion1
|
1376 |
+
return numpy.array([
|
1377 |
+
-x1*x0 - y1*y0 - z1*z0 + w1*w0,
|
1378 |
+
x1*w0 + y1*z0 - z1*y0 + w1*x0,
|
1379 |
+
-x1*z0 + y1*w0 + z1*x0 + w1*y0,
|
1380 |
+
x1*y0 - y1*x0 + z1*w0 + w1*z0], dtype=numpy.float64)
|
1381 |
+
|
1382 |
+
|
1383 |
+
def quaternion_conjugate(quaternion):
|
1384 |
+
"""Return conjugate of quaternion.
|
1385 |
+
|
1386 |
+
>>> q0 = random_quaternion()
|
1387 |
+
>>> q1 = quaternion_conjugate(q0)
|
1388 |
+
>>> q1[0] == q0[0] and all(q1[1:] == -q0[1:])
|
1389 |
+
True
|
1390 |
+
|
1391 |
+
"""
|
1392 |
+
q = numpy.array(quaternion, dtype=numpy.float64, copy=True)
|
1393 |
+
numpy.negative(q[1:], q[1:])
|
1394 |
+
return q
|
1395 |
+
|
1396 |
+
|
1397 |
+
def quaternion_inverse(quaternion):
|
1398 |
+
"""Return inverse of quaternion.
|
1399 |
+
|
1400 |
+
>>> q0 = random_quaternion()
|
1401 |
+
>>> q1 = quaternion_inverse(q0)
|
1402 |
+
>>> numpy.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0])
|
1403 |
+
True
|
1404 |
+
|
1405 |
+
"""
|
1406 |
+
q = numpy.array(quaternion, dtype=numpy.float64, copy=True)
|
1407 |
+
numpy.negative(q[1:], q[1:])
|
1408 |
+
return q / numpy.dot(q, q)
|
1409 |
+
|
1410 |
+
|
1411 |
+
def quaternion_real(quaternion):
|
1412 |
+
"""Return real part of quaternion.
|
1413 |
+
|
1414 |
+
>>> quaternion_real([3, 0, 1, 2])
|
1415 |
+
3.0
|
1416 |
+
|
1417 |
+
"""
|
1418 |
+
return float(quaternion[0])
|
1419 |
+
|
1420 |
+
|
1421 |
+
def quaternion_imag(quaternion):
|
1422 |
+
"""Return imaginary part of quaternion.
|
1423 |
+
|
1424 |
+
>>> quaternion_imag([3, 0, 1, 2])
|
1425 |
+
array([ 0., 1., 2.])
|
1426 |
+
|
1427 |
+
"""
|
1428 |
+
return numpy.array(quaternion[1:4], dtype=numpy.float64, copy=True)
|
1429 |
+
|
1430 |
+
|
1431 |
+
def quaternion_slerp(quat0, quat1, fraction, spin=0, shortestpath=True):
|
1432 |
+
"""Return spherical linear interpolation between two quaternions.
|
1433 |
+
|
1434 |
+
>>> q0 = random_quaternion()
|
1435 |
+
>>> q1 = random_quaternion()
|
1436 |
+
>>> q = quaternion_slerp(q0, q1, 0)
|
1437 |
+
>>> numpy.allclose(q, q0)
|
1438 |
+
True
|
1439 |
+
>>> q = quaternion_slerp(q0, q1, 1, 1)
|
1440 |
+
>>> numpy.allclose(q, q1)
|
1441 |
+
True
|
1442 |
+
>>> q = quaternion_slerp(q0, q1, 0.5)
|
1443 |
+
>>> angle = math.acos(numpy.dot(q0, q))
|
1444 |
+
>>> numpy.allclose(2, math.acos(numpy.dot(q0, q1)) / angle) or \
|
1445 |
+
numpy.allclose(2, math.acos(-numpy.dot(q0, q1)) / angle)
|
1446 |
+
True
|
1447 |
+
|
1448 |
+
"""
|
1449 |
+
q0 = unit_vector(quat0[:4])
|
1450 |
+
q1 = unit_vector(quat1[:4])
|
1451 |
+
if fraction == 0.0:
|
1452 |
+
return q0
|
1453 |
+
elif fraction == 1.0:
|
1454 |
+
return q1
|
1455 |
+
d = numpy.dot(q0, q1)
|
1456 |
+
if abs(abs(d) - 1.0) < _EPS:
|
1457 |
+
return q0
|
1458 |
+
if shortestpath and d < 0.0:
|
1459 |
+
# invert rotation
|
1460 |
+
d = -d
|
1461 |
+
numpy.negative(q1, q1)
|
1462 |
+
angle = math.acos(d) + spin * math.pi
|
1463 |
+
if abs(angle) < _EPS:
|
1464 |
+
return q0
|
1465 |
+
isin = 1.0 / math.sin(angle)
|
1466 |
+
q0 *= math.sin((1.0 - fraction) * angle) * isin
|
1467 |
+
q1 *= math.sin(fraction * angle) * isin
|
1468 |
+
q0 += q1
|
1469 |
+
return q0
|
1470 |
+
|
1471 |
+
|
1472 |
+
def random_quaternion(rand=None):
|
1473 |
+
"""Return uniform random unit quaternion.
|
1474 |
+
|
1475 |
+
rand: array like or None
|
1476 |
+
Three independent random variables that are uniformly distributed
|
1477 |
+
between 0 and 1.
|
1478 |
+
|
1479 |
+
>>> q = random_quaternion()
|
1480 |
+
>>> numpy.allclose(1, vector_norm(q))
|
1481 |
+
True
|
1482 |
+
>>> q = random_quaternion(numpy.random.random(3))
|
1483 |
+
>>> len(q.shape), q.shape[0]==4
|
1484 |
+
(1, True)
|
1485 |
+
|
1486 |
+
"""
|
1487 |
+
if rand is None:
|
1488 |
+
rand = numpy.random.rand(3)
|
1489 |
+
else:
|
1490 |
+
assert len(rand) == 3
|
1491 |
+
r1 = numpy.sqrt(1.0 - rand[0])
|
1492 |
+
r2 = numpy.sqrt(rand[0])
|
1493 |
+
pi2 = math.pi * 2.0
|
1494 |
+
t1 = pi2 * rand[1]
|
1495 |
+
t2 = pi2 * rand[2]
|
1496 |
+
return numpy.array([numpy.cos(t2)*r2, numpy.sin(t1)*r1,
|
1497 |
+
numpy.cos(t1)*r1, numpy.sin(t2)*r2])
|
1498 |
+
|
1499 |
+
|
1500 |
+
def random_rotation_matrix(rand=None):
|
1501 |
+
"""Return uniform random rotation matrix.
|
1502 |
+
|
1503 |
+
rand: array like
|
1504 |
+
Three independent random variables that are uniformly distributed
|
1505 |
+
between 0 and 1 for each returned quaternion.
|
1506 |
+
|
1507 |
+
>>> R = random_rotation_matrix()
|
1508 |
+
>>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4))
|
1509 |
+
True
|
1510 |
+
|
1511 |
+
"""
|
1512 |
+
return quaternion_matrix(random_quaternion(rand))
|
1513 |
+
|
1514 |
+
|
1515 |
+
class Arcball(object):
|
1516 |
+
"""Virtual Trackball Control.
|
1517 |
+
|
1518 |
+
>>> ball = Arcball()
|
1519 |
+
>>> ball = Arcball(initial=numpy.identity(4))
|
1520 |
+
>>> ball.place([320, 320], 320)
|
1521 |
+
>>> ball.down([500, 250])
|
1522 |
+
>>> ball.drag([475, 275])
|
1523 |
+
>>> R = ball.matrix()
|
1524 |
+
>>> numpy.allclose(numpy.sum(R), 3.90583455)
|
1525 |
+
True
|
1526 |
+
>>> ball = Arcball(initial=[1, 0, 0, 0])
|
1527 |
+
>>> ball.place([320, 320], 320)
|
1528 |
+
>>> ball.setaxes([1, 1, 0], [-1, 1, 0])
|
1529 |
+
>>> ball.constrain = True
|
1530 |
+
>>> ball.down([400, 200])
|
1531 |
+
>>> ball.drag([200, 400])
|
1532 |
+
>>> R = ball.matrix()
|
1533 |
+
>>> numpy.allclose(numpy.sum(R), 0.2055924)
|
1534 |
+
True
|
1535 |
+
>>> ball.next()
|
1536 |
+
|
1537 |
+
"""
|
1538 |
+
def __init__(self, initial=None):
|
1539 |
+
"""Initialize virtual trackball control.
|
1540 |
+
|
1541 |
+
initial : quaternion or rotation matrix
|
1542 |
+
|
1543 |
+
"""
|
1544 |
+
self._axis = None
|
1545 |
+
self._axes = None
|
1546 |
+
self._radius = 1.0
|
1547 |
+
self._center = [0.0, 0.0]
|
1548 |
+
self._vdown = numpy.array([0.0, 0.0, 1.0])
|
1549 |
+
self._constrain = False
|
1550 |
+
if initial is None:
|
1551 |
+
self._qdown = numpy.array([1.0, 0.0, 0.0, 0.0])
|
1552 |
+
else:
|
1553 |
+
initial = numpy.array(initial, dtype=numpy.float64)
|
1554 |
+
if initial.shape == (4, 4):
|
1555 |
+
self._qdown = quaternion_from_matrix(initial)
|
1556 |
+
elif initial.shape == (4, ):
|
1557 |
+
initial /= vector_norm(initial)
|
1558 |
+
self._qdown = initial
|
1559 |
+
else:
|
1560 |
+
raise ValueError("initial not a quaternion or matrix")
|
1561 |
+
self._qnow = self._qpre = self._qdown
|
1562 |
+
|
1563 |
+
def place(self, center, radius):
|
1564 |
+
"""Place Arcball, e.g. when window size changes.
|
1565 |
+
|
1566 |
+
center : sequence[2]
|
1567 |
+
Window coordinates of trackball center.
|
1568 |
+
radius : float
|
1569 |
+
Radius of trackball in window coordinates.
|
1570 |
+
|
1571 |
+
"""
|
1572 |
+
self._radius = float(radius)
|
1573 |
+
self._center[0] = center[0]
|
1574 |
+
self._center[1] = center[1]
|
1575 |
+
|
1576 |
+
def setaxes(self, *axes):
|
1577 |
+
"""Set axes to constrain rotations."""
|
1578 |
+
if axes is None:
|
1579 |
+
self._axes = None
|
1580 |
+
else:
|
1581 |
+
self._axes = [unit_vector(axis) for axis in axes]
|
1582 |
+
|
1583 |
+
@property
|
1584 |
+
def constrain(self):
|
1585 |
+
"""Return state of constrain to axis mode."""
|
1586 |
+
return self._constrain
|
1587 |
+
|
1588 |
+
@constrain.setter
|
1589 |
+
def constrain(self, value):
|
1590 |
+
"""Set state of constrain to axis mode."""
|
1591 |
+
self._constrain = bool(value)
|
1592 |
+
|
1593 |
+
def down(self, point):
|
1594 |
+
"""Set initial cursor window coordinates and pick constrain-axis."""
|
1595 |
+
self._vdown = arcball_map_to_sphere(point, self._center, self._radius)
|
1596 |
+
self._qdown = self._qpre = self._qnow
|
1597 |
+
if self._constrain and self._axes is not None:
|
1598 |
+
self._axis = arcball_nearest_axis(self._vdown, self._axes)
|
1599 |
+
self._vdown = arcball_constrain_to_axis(self._vdown, self._axis)
|
1600 |
+
else:
|
1601 |
+
self._axis = None
|
1602 |
+
|
1603 |
+
def drag(self, point):
|
1604 |
+
"""Update current cursor window coordinates."""
|
1605 |
+
vnow = arcball_map_to_sphere(point, self._center, self._radius)
|
1606 |
+
if self._axis is not None:
|
1607 |
+
vnow = arcball_constrain_to_axis(vnow, self._axis)
|
1608 |
+
self._qpre = self._qnow
|
1609 |
+
t = numpy.cross(self._vdown, vnow)
|
1610 |
+
if numpy.dot(t, t) < _EPS:
|
1611 |
+
self._qnow = self._qdown
|
1612 |
+
else:
|
1613 |
+
q = [numpy.dot(self._vdown, vnow), t[0], t[1], t[2]]
|
1614 |
+
self._qnow = quaternion_multiply(q, self._qdown)
|
1615 |
+
|
1616 |
+
def next(self, acceleration=0.0):
|
1617 |
+
"""Continue rotation in direction of last drag."""
|
1618 |
+
q = quaternion_slerp(self._qpre, self._qnow, 2.0+acceleration, False)
|
1619 |
+
self._qpre, self._qnow = self._qnow, q
|
1620 |
+
|
1621 |
+
def matrix(self):
|
1622 |
+
"""Return homogeneous rotation matrix."""
|
1623 |
+
return quaternion_matrix(self._qnow)
|
1624 |
+
|
1625 |
+
|
1626 |
+
def arcball_map_to_sphere(point, center, radius):
|
1627 |
+
"""Return unit sphere coordinates from window coordinates."""
|
1628 |
+
v0 = (point[0] - center[0]) / radius
|
1629 |
+
v1 = (center[1] - point[1]) / radius
|
1630 |
+
n = v0*v0 + v1*v1
|
1631 |
+
if n > 1.0:
|
1632 |
+
# position outside of sphere
|
1633 |
+
n = math.sqrt(n)
|
1634 |
+
return numpy.array([v0/n, v1/n, 0.0])
|
1635 |
+
else:
|
1636 |
+
return numpy.array([v0, v1, math.sqrt(1.0 - n)])
|
1637 |
+
|
1638 |
+
|
1639 |
+
def arcball_constrain_to_axis(point, axis):
|
1640 |
+
"""Return sphere point perpendicular to axis."""
|
1641 |
+
v = numpy.array(point, dtype=numpy.float64, copy=True)
|
1642 |
+
a = numpy.array(axis, dtype=numpy.float64, copy=True)
|
1643 |
+
v -= a * numpy.dot(a, v) # on plane
|
1644 |
+
n = vector_norm(v)
|
1645 |
+
if n > _EPS:
|
1646 |
+
if v[2] < 0.0:
|
1647 |
+
numpy.negative(v, v)
|
1648 |
+
v /= n
|
1649 |
+
return v
|
1650 |
+
if a[2] == 1.0:
|
1651 |
+
return numpy.array([1.0, 0.0, 0.0])
|
1652 |
+
return unit_vector([-a[1], a[0], 0.0])
|
1653 |
+
|
1654 |
+
|
1655 |
+
def arcball_nearest_axis(point, axes):
|
1656 |
+
"""Return axis, which arc is nearest to point."""
|
1657 |
+
point = numpy.array(point, dtype=numpy.float64, copy=False)
|
1658 |
+
nearest = None
|
1659 |
+
mx = -1.0
|
1660 |
+
for axis in axes:
|
1661 |
+
t = numpy.dot(arcball_constrain_to_axis(point, axis), point)
|
1662 |
+
if t > mx:
|
1663 |
+
nearest = axis
|
1664 |
+
mx = t
|
1665 |
+
return nearest
|
1666 |
+
|
1667 |
+
|
1668 |
+
# epsilon for testing whether a number is close to zero
|
1669 |
+
_EPS = numpy.finfo(float).eps * 4.0
|
1670 |
+
|
1671 |
+
# axis sequences for Euler angles
|
1672 |
+
_NEXT_AXIS = [1, 2, 0, 1]
|
1673 |
+
|
1674 |
+
# map axes strings to/from tuples of inner axis, parity, repetition, frame
|
1675 |
+
_AXES2TUPLE = {
|
1676 |
+
'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0),
|
1677 |
+
'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0),
|
1678 |
+
'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0),
|
1679 |
+
'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0),
|
1680 |
+
'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1),
|
1681 |
+
'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1),
|
1682 |
+
'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1),
|
1683 |
+
'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)}
|
1684 |
+
|
1685 |
+
_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items())
|
1686 |
+
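# Illustrative example (not from the original file): each string key encodes
# (inner axis, parity, repetition, frame), and the euler_* functions accept
# either the string or its encoded tuple, so both forms give the same result:
#
#   R0 = euler_matrix(0.1, 0.2, 0.3, 'rxyz')
#   R1 = euler_matrix(0.1, 0.2, 0.3, _AXES2TUPLE['rxyz'])  # (2, 1, 0, 1)
#   numpy.allclose(R0, R1)  # -> True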
|
1687 |
+
|
1688 |
+
def vector_norm(data, axis=None, out=None):
|
1689 |
+
"""Return length, i.e. Euclidean norm, of ndarray along axis.
|
1690 |
+
|
1691 |
+
>>> v = numpy.random.random(3)
|
1692 |
+
>>> n = vector_norm(v)
|
1693 |
+
>>> numpy.allclose(n, numpy.linalg.norm(v))
|
1694 |
+
True
|
1695 |
+
>>> v = numpy.random.rand(6, 5, 3)
|
1696 |
+
>>> n = vector_norm(v, axis=-1)
|
1697 |
+
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2)))
|
1698 |
+
True
|
1699 |
+
>>> n = vector_norm(v, axis=1)
|
1700 |
+
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
|
1701 |
+
True
|
1702 |
+
>>> v = numpy.random.rand(5, 4, 3)
|
1703 |
+
>>> n = numpy.empty((5, 3))
|
1704 |
+
>>> vector_norm(v, axis=1, out=n)
|
1705 |
+
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
|
1706 |
+
True
|
1707 |
+
>>> vector_norm([])
|
1708 |
+
0.0
|
1709 |
+
>>> vector_norm([1])
|
1710 |
+
1.0
|
1711 |
+
|
1712 |
+
"""
|
1713 |
+
data = numpy.array(data, dtype=numpy.float64, copy=True)
|
1714 |
+
if out is None:
|
1715 |
+
if data.ndim == 1:
|
1716 |
+
return math.sqrt(numpy.dot(data, data))
|
1717 |
+
data *= data
|
1718 |
+
out = numpy.atleast_1d(numpy.sum(data, axis=axis))
|
1719 |
+
numpy.sqrt(out, out)
|
1720 |
+
return out
|
1721 |
+
else:
|
1722 |
+
data *= data
|
1723 |
+
numpy.sum(data, axis=axis, out=out)
|
1724 |
+
numpy.sqrt(out, out)
|
1725 |
+
|
1726 |
+
|
1727 |
+
def unit_vector(data, axis=None, out=None):
|
1728 |
+
"""Return ndarray normalized by length, i.e. Euclidean norm, along axis.
|
1729 |
+
|
1730 |
+
>>> v0 = numpy.random.random(3)
|
1731 |
+
>>> v1 = unit_vector(v0)
|
1732 |
+
>>> numpy.allclose(v1, v0 / numpy.linalg.norm(v0))
|
1733 |
+
True
|
1734 |
+
>>> v0 = numpy.random.rand(5, 4, 3)
|
1735 |
+
>>> v1 = unit_vector(v0, axis=-1)
|
1736 |
+
>>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=2)), 2)
|
1737 |
+
>>> numpy.allclose(v1, v2)
|
1738 |
+
True
|
1739 |
+
>>> v1 = unit_vector(v0, axis=1)
|
1740 |
+
>>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=1)), 1)
|
1741 |
+
>>> numpy.allclose(v1, v2)
|
1742 |
+
True
|
1743 |
+
>>> v1 = numpy.empty((5, 4, 3))
|
1744 |
+
>>> unit_vector(v0, axis=1, out=v1)
|
1745 |
+
>>> numpy.allclose(v1, v2)
|
1746 |
+
True
|
1747 |
+
>>> list(unit_vector([]))
|
1748 |
+
[]
|
1749 |
+
>>> list(unit_vector([1]))
|
1750 |
+
[1.0]
|
1751 |
+
|
1752 |
+
"""
|
1753 |
+
if out is None:
|
1754 |
+
data = numpy.array(data, dtype=numpy.float64, copy=True)
|
1755 |
+
if data.ndim == 1:
|
1756 |
+
data /= math.sqrt(numpy.dot(data, data))
|
1757 |
+
return data
|
1758 |
+
else:
|
1759 |
+
if out is not data:
|
1760 |
+
out[:] = numpy.array(data, copy=False)
|
1761 |
+
data = out
|
1762 |
+
length = numpy.atleast_1d(numpy.sum(data*data, axis))
|
1763 |
+
numpy.sqrt(length, length)
|
1764 |
+
if axis is not None:
|
1765 |
+
length = numpy.expand_dims(length, axis)
|
1766 |
+
data /= length
|
1767 |
+
if out is None:
|
1768 |
+
return data
|
1769 |
+
|
1770 |
+
|
1771 |
+
def random_vector(size):
|
1772 |
+
"""Return array of random doubles in the half-open interval [0.0, 1.0).
|
1773 |
+
|
1774 |
+
>>> v = random_vector(10000)
|
1775 |
+
>>> numpy.all(v >= 0) and numpy.all(v < 1)
|
1776 |
+
True
|
1777 |
+
>>> v0 = random_vector(10)
|
1778 |
+
>>> v1 = random_vector(10)
|
1779 |
+
>>> numpy.any(v0 == v1)
|
1780 |
+
False
|
1781 |
+
|
1782 |
+
"""
|
1783 |
+
return numpy.random.random(size)
|
1784 |
+
|
1785 |
+
|
1786 |
+
def vector_product(v0, v1, axis=0):
|
1787 |
+
"""Return vector perpendicular to vectors.
|
1788 |
+
|
1789 |
+
>>> v = vector_product([2, 0, 0], [0, 3, 0])
|
1790 |
+
>>> numpy.allclose(v, [0, 0, 6])
|
1791 |
+
True
|
1792 |
+
>>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]]
|
1793 |
+
>>> v1 = [[3], [0], [0]]
|
1794 |
+
>>> v = vector_product(v0, v1)
|
1795 |
+
>>> numpy.allclose(v, [[0, 0, 0, 0], [0, 0, 6, 6], [0, -6, 0, -6]])
|
1796 |
+
True
|
1797 |
+
>>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]]
|
1798 |
+
>>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]]
|
1799 |
+
>>> v = vector_product(v0, v1, axis=1)
|
1800 |
+
>>> numpy.allclose(v, [[0, 0, 6], [0, -6, 0], [6, 0, 0], [0, -6, 6]])
|
1801 |
+
True
|
1802 |
+
|
1803 |
+
"""
|
1804 |
+
return numpy.cross(v0, v1, axis=axis)
|
1805 |
+
|
1806 |
+
|
1807 |
+
def angle_between_vectors(v0, v1, directed=True, axis=0):
|
1808 |
+
"""Return angle between vectors.
|
1809 |
+
|
1810 |
+
If directed is False, the input vectors are interpreted as undirected axes,
|
1811 |
+
i.e. the maximum angle is pi/2.
|
1812 |
+
|
1813 |
+
>>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3])
|
1814 |
+
>>> numpy.allclose(a, math.pi)
|
1815 |
+
True
|
1816 |
+
>>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False)
|
1817 |
+
>>> numpy.allclose(a, 0)
|
1818 |
+
True
|
1819 |
+
>>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]]
|
1820 |
+
>>> v1 = [[3], [0], [0]]
|
1821 |
+
>>> a = angle_between_vectors(v0, v1)
|
1822 |
+
>>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532])
|
1823 |
+
True
|
1824 |
+
>>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]]
|
1825 |
+
>>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]]
|
1826 |
+
>>> a = angle_between_vectors(v0, v1, axis=1)
|
1827 |
+
>>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532])
|
1828 |
+
True
|
1829 |
+
|
1830 |
+
"""
|
1831 |
+
v0 = numpy.array(v0, dtype=numpy.float64, copy=False)
|
1832 |
+
v1 = numpy.array(v1, dtype=numpy.float64, copy=False)
|
1833 |
+
dot = numpy.sum(v0 * v1, axis=axis)
|
1834 |
+
dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis)
|
1835 |
+
dot = numpy.clip(dot, -1.0, 1.0)
|
1836 |
+
return numpy.arccos(dot if directed else numpy.fabs(dot))
|
1837 |
+
|
1838 |
+
|
1839 |
+
def inverse_matrix(matrix):
|
1840 |
+
"""Return inverse of square transformation matrix.
|
1841 |
+
|
1842 |
+
>>> M0 = random_rotation_matrix()
|
1843 |
+
>>> M1 = inverse_matrix(M0.T)
|
1844 |
+
>>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
|
1845 |
+
True
|
1846 |
+
>>> for size in range(1, 7):
|
1847 |
+
... M0 = numpy.random.rand(size, size)
|
1848 |
+
... M1 = inverse_matrix(M0)
|
1849 |
+
... if not numpy.allclose(M1, numpy.linalg.inv(M0)): print(size)
|
1850 |
+
|
1851 |
+
"""
|
1852 |
+
return numpy.linalg.inv(matrix)
|
1853 |
+
|
1854 |
+
|
1855 |
+
def concatenate_matrices(*matrices):
|
1856 |
+
"""Return concatenation of series of transformation matrices.
|
1857 |
+
|
1858 |
+
>>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5
|
1859 |
+
>>> numpy.allclose(M, concatenate_matrices(M))
|
1860 |
+
True
|
1861 |
+
>>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T))
|
1862 |
+
True
|
1863 |
+
|
1864 |
+
"""
|
1865 |
+
M = numpy.identity(4)
|
1866 |
+
for i in matrices:
|
1867 |
+
M = numpy.dot(M, i)
|
1868 |
+
return M
|
1869 |
+
|
1870 |
+
|
1871 |
+
def is_same_transform(matrix0, matrix1):
|
1872 |
+
"""Return True if two matrices perform same transformation.
|
1873 |
+
|
1874 |
+
>>> is_same_transform(numpy.identity(4), numpy.identity(4))
|
1875 |
+
True
|
1876 |
+
>>> is_same_transform(numpy.identity(4), random_rotation_matrix())
|
1877 |
+
False
|
1878 |
+
|
1879 |
+
"""
|
1880 |
+
matrix0 = numpy.array(matrix0, dtype=numpy.float64, copy=True)
|
1881 |
+
matrix0 /= matrix0[3, 3]
|
1882 |
+
matrix1 = numpy.array(matrix1, dtype=numpy.float64, copy=True)
|
1883 |
+
matrix1 /= matrix1[3, 3]
|
1884 |
+
return numpy.allclose(matrix0, matrix1)
|
1885 |
+
|
1886 |
+
|
1887 |
+
def is_same_quaternion(q0, q1):
|
1888 |
+
"""Return True if two quaternions are equal."""
|
1889 |
+
q0 = numpy.array(q0)
|
1890 |
+
q1 = numpy.array(q1)
|
1891 |
+
return numpy.allclose(q0, q1) or numpy.allclose(q0, -q1)
|
1892 |
+
|
1893 |
+
|
1894 |
+
def _import_module(name, package=None, warn=True, prefix='_py_', ignore='_'):
|
1895 |
+
"""Try import all public attributes from module into global namespace.
|
1896 |
+
|
1897 |
+
Existing attributes with name clashes are renamed with prefix.
|
1898 |
+
Attributes starting with underscore are ignored by default.
|
1899 |
+
|
1900 |
+
Return True on successful import.
|
1901 |
+
|
1902 |
+
"""
|
1903 |
+
import warnings
|
1904 |
+
from importlib import import_module
|
1905 |
+
try:
|
1906 |
+
if not package:
|
1907 |
+
module = import_module(name)
|
1908 |
+
else:
|
1909 |
+
module = import_module('.' + name, package=package)
|
1910 |
+
except ImportError:
|
1911 |
+
if warn:
|
1912 |
+
warnings.warn('failed to import module %s' % name)
|
1913 |
+
else:
|
1914 |
+
for attr in dir(module):
|
1915 |
+
if ignore and attr.startswith(ignore):
|
1916 |
+
continue
|
1917 |
+
if prefix:
|
1918 |
+
if attr in globals():
|
1919 |
+
globals()[prefix + attr] = globals()[attr]
|
1920 |
+
elif warn:
|
1921 |
+
warnings.warn('no Python implementation of ' + attr)
|
1922 |
+
globals()[attr] = getattr(module, attr)
|
1923 |
+
return True
|
1924 |
+
|
1925 |
+
|
1926 |
+
_import_module('_transformations')
|
1927 |
+
|
1928 |
+
if __name__ == '__main__':
|
1929 |
+
import doctest
|
1930 |
+
import random # noqa: used in doctests
|
1931 |
+
try:
|
1932 |
+
numpy.set_printoptions(suppress=True, precision=5, legacy='1.13')
|
1933 |
+
except TypeError:
|
1934 |
+
numpy.set_printoptions(suppress=True, precision=5)
|
1935 |
+
doctest.testmod()
|
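A minimal sketch of how the helpers above can be combined to turn a rotation given as a quaternion plus a translation into a 4x4 homogeneous pose and back. The quaternion and translation values, the import path, and the assumption that the repository root is on the Python path are illustrative only, not taken from the repository's training or evaluation code.

import numpy

from lib.transformations import (
    quaternion_matrix, quaternion_from_matrix, is_same_quaternion)

q = numpy.array([0.9981095, 0.0164262, 0.0328524, 0.0492786])  # (w, x, y, z), example values
t = numpy.array([0.10, -0.20, 0.50])                           # example translation

pose = quaternion_matrix(q)   # 4x4 rotation-only homogeneous matrix
pose[:3, 3] = t               # write the translation into the last column

q_back = quaternion_from_matrix(pose)  # the translation does not affect the recovered rotation
assert is_same_quaternion(q, q_back)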
lib/utils.py
ADDED
@@ -0,0 +1,15 @@
import logging

def setup_logger(logger_name, log_file, level=logging.INFO):
    l = logging.getLogger(logger_name)
    formatter = logging.Formatter('%(asctime)s : %(message)s')
    fileHandler = logging.FileHandler(log_file, mode='w')
    fileHandler.setFormatter(formatter)

    l.setLevel(level)
    l.addHandler(fileHandler)

    streamHandler = logging.StreamHandler()
    streamHandler.setFormatter(formatter)
    l.addHandler(streamHandler)
    return l
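A minimal usage sketch for setup_logger (the logger name and log path below are illustrative, not taken from the training scripts):

from lib.utils import setup_logger

logger = setup_logger('train_log', 'experiments/logs/ycb/epoch_0_log.txt')
logger.info('training started')  # written to both the log file and the console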
replace_ycb_toolbox/evaluate_poses_keyframe.m
ADDED
@@ -0,0 +1,217 @@
function evaluate_poses_keyframe

opt = globals();

% read class names
fid = fopen('classes.txt', 'r');
C = textscan(fid, '%s');
object_names = C{1};
fclose(fid);

% load model points
num_objects = numel(object_names);
models = cell(num_objects, 1);
for i = 1:num_objects
    filename = fullfile(opt.root, 'models', object_names{i}, 'points.xyz');
    disp(filename);
    models{i} = load(filename);
end

% load the keyframe indexes
fid = fopen('keyframe.txt', 'r');
C = textscan(fid, '%s');
keyframes = C{1};
fclose(fid);

% allocate result buffers
distances_sys = zeros(100000, 5);
distances_non = zeros(100000, 5);
errors_rotation = zeros(100000, 5);
errors_translation = zeros(100000, 5);
results_seq_id = zeros(100000, 1);
results_frame_id = zeros(100000, 1);
results_object_id = zeros(100000, 1);
results_cls_id = zeros(100000, 1);

% for each image
count = 0;
for i = 1:numel(keyframes)

    % parse keyframe name
    name = keyframes{i};
    pos = strfind(name, '/');
    seq_id = str2double(name(1:pos-1));
    frame_id = str2double(name(pos+1:end));

    % load PoseCNN result
    filename = sprintf('results_PoseCNN_RSS2018/%06d.mat', i - 1);
    result = load(filename);
    filename = sprintf('Densefusion_iterative_result/%04d.mat', i - 1);
    result_my = load(filename);
    filename = sprintf('Densefusion_wo_refine_result/%04d.mat', i - 1);
    result_mygt = load(filename);

    % load 3D coordinate regression result
    filename = sprintf('results_3DCoordinate/%04d.mat', i - 1);
    result_3DCoordinate = load(filename);

    % load gt poses
    filename = fullfile(opt.root, 'data', sprintf('%04d/%06d-meta.mat', seq_id, frame_id));
    disp(filename);
    gt = load(filename);

    % for each gt pose
    for j = 1:numel(gt.cls_indexes)
        count = count + 1;
        cls_index = gt.cls_indexes(j);
        RT_gt = gt.poses(:, :, j);

        results_seq_id(count) = seq_id;
        results_frame_id(count) = frame_id;
        results_object_id(count) = j;
        results_cls_id(count) = cls_index;

        % network result
        roi_index = find(result.rois(:, 2) == cls_index);
        if isempty(roi_index) == 0
            RT = zeros(3, 4);

            % pose from network
            RT(1:3, 1:3) = quat2rotm(result_my.poses(roi_index, 1:4));
            RT(:, 4) = result_my.poses(roi_index, 5:7);
            distances_sys(count, 1) = adi(RT, RT_gt, models{cls_index}');
            distances_non(count, 1) = add(RT, RT_gt, models{cls_index}');
            errors_rotation(count, 1) = re(RT(1:3, 1:3), RT_gt(1:3, 1:3));
            errors_translation(count, 1) = te(RT(:, 4), RT_gt(:, 4));

            % pose after ICP refinement
            RT(1:3, 1:3) = quat2rotm(result.poses_icp(roi_index, 1:4));
            RT(:, 4) = result.poses_icp(roi_index, 5:7);
            distances_sys(count, 2) = adi(RT, RT_gt, models{cls_index}');
            distances_non(count, 2) = add(RT, RT_gt, models{cls_index}');
            errors_rotation(count, 2) = re(RT(1:3, 1:3), RT_gt(1:3, 1:3));
            errors_translation(count, 2) = te(RT(:, 4), RT_gt(:, 4));

            % pose from multiview
            RT(1:3, 1:3) = quat2rotm(result_mygt.poses(roi_index, 1:4));
            RT(:, 4) = result_mygt.poses(roi_index, 5:7);
            distances_sys(count, 3) = adi(RT, RT_gt, models{cls_index}');
            distances_non(count, 3) = add(RT, RT_gt, models{cls_index}');
            errors_rotation(count, 3) = re(RT(1:3, 1:3), RT_gt(1:3, 1:3));
            errors_translation(count, 3) = te(RT(:, 4), RT_gt(:, 4));

            % % pose from multiview + ICP
            % RT(1:3, 1:3) = quat2rotm(result.poses_multiview_icp(roi_index, 1:4));
            % RT(:, 4) = result.poses_multiview_icp(roi_index, 5:7);
            % distances_sys(count, 4) = adi(RT, RT_gt, models{cls_index}');
            % distances_non(count, 4) = add(RT, RT_gt, models{cls_index}');
            % errors_rotation(count, 4) = re(RT(1:3, 1:3), RT_gt(1:3, 1:3));
            % errors_translation(count, 4) = te(RT(:, 4), RT_gt(:, 4));
        else
            distances_sys(count, 1:4) = inf;
            distances_non(count, 1:4) = inf;
            errors_rotation(count, 1:4) = inf;
            errors_translation(count, 1:4) = inf;
        end

        % 3D coordinate regression result
        roi_index = find(result_3DCoordinate.rois(:, 2) == cls_index);
        if isempty(roi_index) == 0
            RT = zeros(3, 4);
            RT(1:3, 1:3) = quat2rotm(result_3DCoordinate.poses(roi_index, 1:4));
            RT(:, 4) = result_3DCoordinate.poses(roi_index, 5:7);
            distances_sys(count, 5) = adi(RT, RT_gt, models{cls_index}');
            distances_non(count, 5) = add(RT, RT_gt, models{cls_index}');
            errors_rotation(count, 5) = re(RT(1:3, 1:3), RT_gt(1:3, 1:3));
            errors_translation(count, 5) = te(RT(:, 4), RT_gt(:, 4));
        else
            distances_sys(count, 5) = inf;
            distances_non(count, 5) = inf;
            errors_rotation(count, 5) = inf;
            errors_translation(count, 5) = inf;
        end
    end
end
distances_sys = distances_sys(1:count, :);
distances_non = distances_non(1:count, :);
errors_rotation = errors_rotation(1:count, :);
errors_translation = errors_translation(1:count, :);
results_seq_id = results_seq_id(1:count);
results_frame_id = results_frame_id(1:count);
results_object_id = results_object_id(1:count, :);
results_cls_id = results_cls_id(1:count, :);
save('results_keyframe.mat', 'distances_sys', 'distances_non', 'errors_rotation', 'errors_translation', ...
    'results_seq_id', 'results_frame_id', 'results_object_id', 'results_cls_id');

function pts_new = transform_pts_Rt(pts, RT)
% """
% Applies a rigid transformation to 3D points.
%
% :param pts: 3xn matrix with 3D points.
% :param RT: 3x4 transformation matrix [R | t].
% :return: 3xn matrix with transformed 3D points.
% """
n = size(pts, 2);
pts_new = RT * [pts; ones(1, n)];

function error = add(RT_est, RT_gt, pts)
% """
% Average Distance of Model Points for objects with no indistinguishable views
% - by Hinterstoisser et al. (ACCV 2012).
%
% :param RT_est: Estimated pose as 3x4 matrix [R | t].
% :param RT_gt: Ground truth pose as 3x4 matrix [R | t].
% :param pts: 3xn matrix with 3D model points.
% :return: Error of RT_est w.r.t. RT_gt.
% """
pts_est = transform_pts_Rt(pts, RT_est);
pts_gt = transform_pts_Rt(pts, RT_gt);
diff = pts_est - pts_gt;
error = mean(sqrt(sum(diff.^2, 1)));

function error = adi(RT_est, RT_gt, pts)
% """
% Average Distance of Model Points for objects with indistinguishable views
% - by Hinterstoisser et al. (ACCV 2012).
%
% :param RT_est: Estimated pose as 3x4 matrix [R | t].
% :param RT_gt: Ground truth pose as 3x4 matrix [R | t].
% :param pts: 3xn matrix with 3D model points.
% :return: Error of RT_est w.r.t. RT_gt.
% """
pts_est = transform_pts_Rt(pts, RT_est);
pts_gt = transform_pts_Rt(pts, RT_gt);

% Calculate distances to the nearest neighbors from pts_gt to pts_est
MdlKDT = KDTreeSearcher(pts_est');
[~, D] = knnsearch(MdlKDT, pts_gt');
error = mean(D);

function error = re(R_est, R_gt)
% """
% Rotational Error.
%
% :param R_est: Rotation matrix of the estimated pose (3x3).
% :param R_gt: Rotation matrix of the ground truth pose (3x3).
% :return: Rotation error of R_est w.r.t. R_gt in degrees.
% """
error_cos = 0.5 * (trace(R_est * inv(R_gt)) - 1.0);
error_cos = min(1.0, max(-1.0, error_cos));
error = acos(error_cos);
error = 180.0 * error / pi;

function error = te(t_est, t_gt)
% """
% Translational Error.
%
% :param t_est: Translation element of the estimated pose (3x1 vector).
% :param t_gt: Translation element of the ground truth pose (3x1 vector).
% :return: Error of t_est w.r.t. t_gt.
% """
error = norm(t_gt - t_est);
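For readers following along in Python, a rough NumPy/SciPy equivalent of the add/adi helpers above (the ADD and ADD-S metrics of Hinterstoisser et al.); the function names here are illustrative and not part of the repo:

import numpy as np
from scipy.spatial import cKDTree

def add_metric(RT_est, RT_gt, pts):
    # pts: Nx3 model points; RT_*: 3x4 pose matrices [R | t]
    p_est = pts @ RT_est[:, :3].T + RT_est[:, 3]
    p_gt = pts @ RT_gt[:, :3].T + RT_gt[:, 3]
    return np.mean(np.linalg.norm(p_est - p_gt, axis=1))  # mean point-to-point distance

def adds_metric(RT_est, RT_gt, pts):
    # ADD-S: mean nearest-neighbour distance, used for symmetric objects
    p_est = pts @ RT_est[:, :3].T + RT_est[:, 3]
    p_gt = pts @ RT_gt[:, :3].T + RT_gt[:, 3]
    dists, _ = cKDTree(p_est).query(p_gt, k=1)
    return np.mean(dists)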
replace_ycb_toolbox/plot_accuracy_keyframe.m
ADDED
@@ -0,0 +1,170 @@
function plot_accuracy_keyframe

color = {'r', 'y', 'g', 'b', 'm'};
leng = {'iterative', 'PoseCNN+ICP', 'per-pixel', '3DCoordinate', '3D'};
aps = zeros(5, 1);
lengs = cell(5, 1);
close all;

% load results
object = load('results_keyframe.mat');
distances_sys = object.distances_sys;
distances_non = object.distances_non;
rotations = object.errors_rotation;
translations = object.errors_translation;
cls_ids = object.results_cls_id;

index_plot = [2, 3, 1, 5];

% read class names
fid = fopen('classes.txt', 'r');
C = textscan(fid, '%s');
classes = C{1};
classes{end+1} = 'All 21 objects';
fclose(fid);

hf = figure('units', 'normalized', 'outerposition', [0 0 1 1]);
font_size = 12;
max_distance = 0.1;

% for each class
for k = 1:numel(classes)
    index = find(cls_ids == k);
    if isempty(index)
        index = 1:size(distances_sys, 1);
    end

    % distance symmetry
    subplot(2, 2, 1);
    for i = index_plot
        D = distances_sys(index, i);
        D(D > max_distance) = inf;
        d = sort(D);
        n = numel(d);
        c = numel(d(d < 0.02));
        accuracy = cumsum(ones(1, n)) / n;
        plot(d, accuracy, color{i}, 'LineWidth', 4);
        aps(i) = VOCap(d, accuracy);
        lengs{i} = sprintf('%s(AUC:%.2f)(<2cm:%.2f)', leng{i}, aps(i)*100, (c/n)*100);
        hold on;
    end
    hold off;
    h = legend(lengs(index_plot), 'Location', 'southeast');
    set(h, 'FontSize', font_size);
    h = xlabel('Average distance threshold in meter (symmetry)');
    set(h, 'FontSize', font_size);
    h = ylabel('accuracy');
    set(h, 'FontSize', font_size);
    h = title(classes{k}, 'Interpreter', 'none');
    set(h, 'FontSize', font_size);
    set(gca, 'FontSize', font_size);

    % distance non-symmetry
    subplot(2, 2, 2);
    for i = index_plot
        D = distances_non(index, i);
        D(D > max_distance) = inf;
        d = sort(D);
        n = numel(d);
        c = numel(d(d < 0.02));
        accuracy = cumsum(ones(1, n)) / n;
        plot(d, accuracy, color{i}, 'LineWidth', 4);
        aps(i) = VOCap(d, accuracy);
        lengs{i} = sprintf('%s(AUC:%.2f)(<2cm:%.2f)', leng{i}, aps(i)*100, (c/n)*100);
        hold on;
    end
    hold off;
    h = legend(lengs(index_plot), 'Location', 'southeast');
    set(h, 'FontSize', font_size);
    h = xlabel('Average distance threshold in meter (non-symmetry)');
    set(h, 'FontSize', font_size);
    h = ylabel('accuracy');
    set(h, 'FontSize', font_size);
    h = title(classes{k}, 'Interpreter', 'none');
    set(h, 'FontSize', font_size);
    set(gca, 'FontSize', font_size);

    % rotation
    subplot(2, 2, 3);
    for i = index_plot
        D = rotations(index, i);
        d = sort(D);
        n = numel(d);
        accuracy = cumsum(ones(1, n)) / n;
        plot(d, accuracy, color{i}, 'LineWidth', 4);
        hold on;
    end
    hold off;
    h = legend(leng(index_plot), 'Location', 'southeast');
    set(h, 'FontSize', font_size);
    h = xlabel('Rotation angle threshold');
    set(h, 'FontSize', font_size);
    h = ylabel('accuracy');
    set(h, 'FontSize', font_size);
    h = title(classes{k}, 'Interpreter', 'none');
    set(h, 'FontSize', font_size);
    set(gca, 'FontSize', font_size);

    % translation
    subplot(2, 2, 4);
    for i = index_plot
        D = translations(index, i);
        D(D > max_distance) = inf;
        d = sort(D);
        n = numel(d);
        accuracy = cumsum(ones(1, n)) / n;
        plot(d, accuracy, color{i}, 'LineWidth', 4);
        hold on;
    end
    hold off;
    h = legend(leng(index_plot), 'Location', 'southeast');
    set(h, 'FontSize', font_size);
    h = xlabel('Translation threshold in meter');
    set(h, 'FontSize', font_size);
    h = ylabel('accuracy');
    set(h, 'FontSize', font_size);
    h = title(classes{k}, 'Interpreter', 'none');
    set(h, 'FontSize', font_size);
    set(gca, 'FontSize', font_size);

    filename = sprintf('plots/%s.png', classes{k});
    hgexport(hf, filename, hgexport('factorystyle'), 'Format', 'png');
end

function ap = VOCap(rec, prec)

index = isfinite(rec);
rec = rec(index);
prec = prec(index)';

mrec = [0; rec; 0.1];
mpre = [0; prec; prec(end)];
for i = 2:numel(mpre)
    mpre(i) = max(mpre(i), mpre(i-1));
end
i = find(mrec(2:end) ~= mrec(1:end-1)) + 1;
ap = sum((mrec(i) - mrec(i-1)) .* mpre(i)) * 10;
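VOCap above is the usual area-under-curve computation: accuracy as a function of the distance threshold, integrated step-wise up to the 0.1 m cap and rescaled to [0, 1]. A rough NumPy sketch of the same integration, assuming a vector of per-instance distances (the function name is illustrative, not from the repo):

import numpy as np

def auc_up_to(distances, max_threshold=0.1):
    d = np.sort(np.asarray(distances, dtype=np.float64))
    accuracy = np.arange(1, d.size + 1) / d.size
    keep = d <= max_threshold                      # distances beyond the cap count as failures
    rec = np.concatenate(([0.0], d[keep], [max_threshold]))
    prec = np.concatenate(([0.0], accuracy[keep], accuracy[keep][-1:] if keep.any() else [0.0]))
    prec = np.maximum.accumulate(prec)             # make the accuracy curve non-decreasing
    idx = np.where(rec[1:] != rec[:-1])[0] + 1
    return np.sum((rec[idx] - rec[idx - 1]) * prec[idx]) / max_threshold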
run.sh
ADDED
@@ -0,0 +1,29 @@
#!/bin/sh

# Runs a docker container with the image created by build.sh
until nvidia-docker ps
do
    echo "Waiting for docker server"
    sleep 1
done

XSOCK=/tmp/.X11-unix

XAUTH=/root/.Xauthority

SRC_CONTAINER=/root/dense_fusion
SRC_HOST="$(pwd)"

xhost local:root

nvidia-docker run \
    --name dense_fusion \
    -it --rm \
    --volume=$XSOCK:$XSOCK:rw \
    --volume=$XAUTH:$XAUTH:rw \
    --volume=$SRC_HOST:$SRC_CONTAINER:rw \
    --env="XAUTHORITY=${XAUTH}" \
    --env="DISPLAY=${DISPLAY}" \
    --privileged -v /dev/bus/usb:/dev/bus/usb \
    --net=host \
    dense_fusion
tools/_init_paths.py
ADDED
@@ -0,0 +1,3 @@
import os
import sys
sys.path.insert(0, os.getcwd())