Skip to content

Commit

Permalink
Added dataset folder, git files, and python dependencies.
Browse files Browse the repository at this point in the history
  • Loading branch information
nubertj committed Apr 28, 2021
1 parent c910d47 commit ac92c48
Show file tree
Hide file tree
Showing 6 changed files with 553 additions and 0 deletions.
10 changes: 10 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Byte compiled
__pycache__/
*.pyc
*.egg-info
.idea

# Datasets
datasets/darpa
datasets/kitti
mlruns/
197 changes: 197 additions & 0 deletions conda/DeLORA-py3.9.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,197 @@
name: DeLORA-py3.9
channels:
- numba
- pytorch
- anaconda
- conda-forge
- defaults
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=1_llvm
- alembic=1.5.7=pyhd8ed1ab_0
- appdirs=1.4.4=pyh9f0ad1d_0
- asn1crypto=1.4.0=pyh9f0ad1d_0
- blas=2.108=mkl
- blas-devel=3.9.0=8_mkl
- brotlipy=0.7.0=py39h3811e60_1001
- bzip2=1.0.8=h7f98852_4
- ca-certificates=2020.12.5=ha878542_0
- certifi=2020.12.5=py39hf3d152e_1
- cffi=1.14.5=py39he32792d_0
- chardet=4.0.0=py39hf3d152e_1
- click=7.1.2=py_0
- cloudpickle=1.6.0=py_0
- configparser=5.0.2=pyhd8ed1ab_0
- cryptography=3.4.6=py39hbca0aa6_0
- cudatoolkit=11.1.1=h6406543_8
- cycler=0.10.0=py39h06a4308_0
- databricks-cli=0.9.1=py_0
- dbus=1.13.18=hb2f20db_0
- docker-py=4.4.4=py39hf3d152e_0
- docker-pycreds=0.4.0=py_0
- entrypoints=0.3=pyhd8ed1ab_1003
- expat=2.2.10=he6710b0_2
- ffmpeg=4.3=hf484d3e_0
- flask=1.1.2=pyh9f0ad1d_0
- fontconfig=2.13.1=h6c09931_0
- freetype=2.10.4=h0708190_1
- gitdb=4.0.5=pyhd8ed1ab_1
- gitpython=3.1.14=pyhd8ed1ab_0
- glib=2.67.4=h36276a3_1
- gmp=6.2.1=h58526e2_0
- gnutls=3.6.13=h85f3911_1
- gorilla=0.3.0=py_0
- gst-plugins-base=1.14.0=h8213a91_2
- gstreamer=1.14.0=h28cd5cc_2
- gunicorn=20.0.4=py39hf3d152e_3
- icu=58.2=he6710b0_3
- idna=2.10=pyh9f0ad1d_0
- itsdangerous=1.1.0=py_0
- jinja2=2.11.3=pyh44b312d_0
- jpeg=9b=h024ee3a_2
- kiwisolver=1.3.1=py39h2531618_0
- kornia=0.3.0=pyh9f0ad1d_0
- lame=3.100=h7f98852_1001
- lcms2=2.11=h396b838_0
- ld_impl_linux-64=2.33.1=h53a641e_7
- libblas=3.9.0=8_mkl
- libcblas=3.9.0=8_mkl
- libedit=3.1.20191231=h14c3975_1
- libffi=3.3=he6710b0_2
- libgcc-ng=9.3.0=h2828fa1_18
- libgfortran-ng=9.3.0=hff62375_18
- libgfortran5=9.3.0=hff62375_18
- libiconv=1.16=h516909a_0
- liblapack=3.9.0=8_mkl
- liblapacke=3.9.0=8_mkl
- libllvm10=10.0.1=hbcb73fb_5
- libpng=1.6.37=h21135ba_2
- libprotobuf=3.15.6=h780b84a_0
- libstdcxx-ng=9.3.0=h6de172a_18
- libtiff=4.1.0=h2733197_1
- libuuid=1.0.3=h1bed415_2
- libxcb=1.14=h7b6447c_0
- libxml2=2.9.10=hb55368b_3
- llvm-openmp=11.0.1=h4bd325d_0
- llvmlite=0.36.0=py39h612dafd_4
- lz4-c=1.9.3=h9c3ff4c_0
- mako=1.1.4=pyh44b312d_0
- markupsafe=1.1.1=py39h3811e60_3
- matplotlib=3.3.4=py39hf3d152e_0
- matplotlib-base=3.3.4=py39h62a2d02_0
- mkl=2020.4=h726a3e6_304
- mkl-devel=2020.4=ha770c72_305
- mkl-include=2020.4=h726a3e6_304
- mlflow=1.2.0=py_1
- ncurses=6.2=he6710b0_1
- nettle=3.6=he412f7d_0
- ninja=1.10.2=h4bd325d_0
- numba=0.53.0rc3=np1.16py3.9hc547734_g1c882cbbf_0
- numpy=1.20.1=py39hdbf815f_0
- olefile=0.46=pyh9f0ad1d_1
- openh264=2.1.1=h780b84a_0
- openssl=1.1.1j=h7f98852_0
- packaging=20.9=pyh44b312d_0
- pandas=1.2.3=py39hde0f152_0
- pcre=8.44=he6710b0_0
- pillow=8.1.1=py39he98fc37_0
- pip=21.0.1=py39h06a4308_0
- protobuf=3.15.6=py39he80948d_0
- pycparser=2.20=pyh9f0ad1d_2
- pyopenssl=20.0.1=pyhd8ed1ab_0
- pyparsing=2.4.7=pyhd3eb1b0_0
- pyqt=5.9.2=py39h2531618_6
- pysocks=1.7.1=py39hf3d152e_3
- python=3.9.2=hdb3f193_0
- python-dateutil=2.8.1=pyhd3eb1b0_0
- python-editor=1.0.4=py_0
- python_abi=3.9=1_cp39
- pytorch=1.8.0=py3.9_cuda11.1_cudnn8.0.5_0
- pytz=2021.1=pyhd8ed1ab_0
- pyyaml=5.4.1=py39h3811e60_0
- qt=5.9.7=h5867ecd_1
- querystring_parser=1.2.4=py_0
- readline=8.1=h27cfd23_0
- requests=2.25.1=pyhd3deb0d_0
- setuptools=52.0.0=py39h06a4308_0
- simplejson=3.17.2=py39h3811e60_2
- sip=4.19.13=py39h2531618_0
- six=1.15.0=py39h06a4308_0
- smmap=3.0.5=pyh44b312d_0
- sqlalchemy=1.4.0=py39h3811e60_0
- sqlite=3.33.0=h62c20be_0
- sqlparse=0.4.1=pyh9f0ad1d_0
- tabulate=0.8.9=pyhd8ed1ab_0
- tk=8.6.10=hbc83047_0
- torchaudio=0.8.0=py39
- torchvision=0.9.0=py39_cu111
- tornado=6.1=py39h27cfd23_0
- typing_extensions=3.7.4.3=py_0
- tzdata=2020f=h52ac0ba_0
- urllib3=1.26.4=pyhd8ed1ab_0
- websocket-client=0.57.0=py39hf3d152e_4
- werkzeug=1.0.1=pyh9f0ad1d_0
- wheel=0.36.2=pyhd3eb1b0_0
- xz=5.2.5=h7b6447c_0
- yaml=0.2.5=h7b6447c_0
- zlib=1.2.11=h7b6447c_3
- zstd=1.4.9=ha95c52a_0
- pip:
- addict==2.4.0
- argon2-cffi==20.1.0
- async-generator==1.10
- attrs==20.3.0
- backcall==0.2.0
- bleach==3.3.0
- catkin-pkg==0.4.23
- decorator==5.0.7
- defusedxml==0.7.1
- distro==1.5.0
- docutils==0.16
- gnupg==2.3.1
- ipykernel==5.5.3
- ipython==7.22.0
- ipython-genutils==0.2.0
- ipywidgets==7.6.3
- jedi==0.18.0
- jsonschema==3.2.0
- jupyter==1.0.0
- jupyter-client==6.1.12
- jupyter-console==6.4.0
- jupyter-core==4.7.1
- jupyterlab-pygments==0.1.2
- jupyterlab-widgets==1.0.0
- mistune==0.8.4
- nbclient==0.5.3
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.5.1
- notebook==6.3.0
- opencv-python==4.5.1.48
- pandocfilters==1.4.3
- parso==0.8.2
- pexpect==4.8.0
- pickleshare==0.7.5
- prometheus-client==0.10.1
- prompt-toolkit==3.0.18
- psutil==5.8.0
- ptyprocess==0.7.0
- pycryptodomex==3.10.1
- pygments==2.8.1
- pykitti==0.3.1
- pyrsistent==0.17.3
- pyzmq==22.0.3
- qqdm==0.0.7
- qtconsole==5.0.3
- qtpy==1.9.0
- rospkg==1.2.10
- scipy==1.6.1
- send2trash==1.5.0
- terminado==0.9.4
- testpath==0.4.4
- traitlets==5.0.5
- wcwidth==0.2.5
- webencodings==0.5.1
- widgetsnbextension==3.5.1
prefix: /home/nubertj/software/conda/anaconda3/envs/DeLORA-py3.9

8 changes: 8 additions & 0 deletions conda/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Conda Installation

## Python3

We provide a conda file. For training, testing, and ROS Noetic deployment please use the Python3 version:
```bash
conda env create -f DeLORA-py3.9.yml
```
67 changes: 67 additions & 0 deletions datasets/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# Used Datasets

In general we provide support for rosbags and the KITTI dataset. For each dataset we assume the following hierarchical
structure: ```dataset_name/<path_to_rosbag>``` or, for KITTI, its original structure ```dataset_name/sequence/scan```.
Here, sequences are numbered according to 00, 01, ...99. After preprocessing, scans will be numbered according to
00000...99999. An example for preprocessing a rosbag can be seen with the DARPA SubT dataset; the KITTI example can be
seen in the KITTI section.

## Rosbag - DARPA SubT Dataset Example

Download the DARPA SubT Rosbags: [link](https://bitbucket.org/subtchallenge/subt_reference_datasets/src/master/)

```bash
mkdir $PWD/datasets/darpa/
# Link taken from https://bitbucket.org/subtchallenge/subt_reference_datasets/src/master/
wget https://subt-data.s3.amazonaws.com/SubT_Urban_Ckt/a_lvl_1.bag -O $PWD/datasets/darpa/00.bag

```

### Structure

### Run preprocessing

Pull the rosbag at the above link, and put it to ```<delora_ws>/datasets/darpa/<name>.bag```. Rename it
to ```<delora_ws>/datasets/darpa/00.bag``` (or ```01...99.bag``` if you have multiple sequences). In the
file ```./config/deployment_options.yaml``` set ```datasets: ["darpa"]```. Preprocessing can then be run with the
following command:

```bash
preprocess_data.py
```

If your files are placed somewhere else, simply adapt the path in ```./config/config_datasets.yaml``` (global, or local
w.r.t. the Python working directory).

## KITTI Dataset
### LiDAR Scans
Download the "velodyne laser data" from the official KITTI odometry evaluation (
80GB): [link](http://www.cvlibs.net/datasets/kitti/eval_odometry.php). Put it to ```<delora_ws>/datasets/kitti```,
where ```kitti``` contains ```/data_odometry_velodyne/dataset/sequences/00..21```.
### Groundtruth poses
Please also download the groundtruth poses [here](http://www.cvlibs.net/datasets/kitti/eval_odometry.php).
Make sure that the files are located at ```<delora_ws>/datasets/kitti```,
where ```kitti``` contains ```/data_odometry_poses/dataset/poses/00..10.txt```.

### Run preprocessing

In the file ```./config/deployment_options.yaml``` set ```datasets: ["kitti"]```. Then run

```bash
preprocess_data.py
```

## Custom Dataset

Just follow the above procedure for custom datasets. Any sequence of rosbags can be used.

## Visualize Processed Dataset

The point cloud and its estimated normals for a dataset can be visualized using the following command:

```bash
visualize_pointcloud_normals.py
```

With this command, the first 100 scans with their normals are published under the topics ```/lidar/points```
and ```/lidar/normals``` in the frame ```lidar```, and can be visualized in *RVIZ*.
Loading

0 comments on commit ac92c48

Please sign in to comment.