Add data preprocessing to README.md. Closes #1
zhou13 committed May 13, 2019
1 parent ea12abe commit 5ef6ad4
Showing 2 changed files with 33 additions and 37 deletions.
23 changes: 19 additions & 4 deletions README.md
@@ -1,6 +1,6 @@
# L-CNN — End-to-End Wireframe Parsing

This repository contains the official PyTorch implementation of the paper: *Yichao Zhou, Haozhi Qi, Yi Ma. ["End-to-End Wireframe Parsing."](https://arxiv.org/abs/1905.03246) arXiv:1905.03246 [cs.CV]*.
This repository contains the official PyTorch implementation of the paper: *[Yichao Zhou](https://yichaozhou.com), [Haozhi Qi](http://haozhi.io), [Yi Ma](https://people.eecs.berkeley.edu/~yima/). ["End-to-End Wireframe Parsing."](https://arxiv.org/abs/1905.03246) arXiv:1905.03246 [cs.CV]*.

## Introduction

@@ -86,20 +86,35 @@ conda install -y pyyaml docopt matplotlib scikit-image opencv
mkdir data logs post
```

### Downloading data
### Downloading the Processed Dataset
Make sure `curl` is installed on your system and execute
```bash
cd data
../misc/gdrive-download.sh 1T4_6Nb5r4yAXre3lf-zpmp3RbmyP1t9q wireframe.tar.xz
tar xf wireframe.tar.xz
rm *.xz
rm wireframe.tar.xz
cd ..
```

If `gdrive-download.sh` does not work for you, you can download the pre-processed dataset manually from [Google
If `gdrive-download.sh` does not work for you, you can download the pre-processed dataset
`wireframe.tar.xz` manually from [Google
Drive](https://drive.google.com/drive/u/1/folders/1rXLAh5VIj8jwf8vLfuZncStihRO2chFr) and extract it
inside `data/` as shown above.
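
If you prefer to script that manual download, here is a minimal sketch run from the repository
root. It assumes the third-party `gdown` package (`pip install gdown`) and Python's standard
`tarfile` module; neither the package nor this snippet is part of the repository, and only the
file ID is taken from the `gdrive-download.sh` command above.

```python
# Sketch only: "gdown" is a third-party package (pip install gdown), not part of this repository.
import tarfile

import gdown

# Same Google Drive file ID as used by gdrive-download.sh above.
url = "https://drive.google.com/uc?id=1T4_6Nb5r4yAXre3lf-zpmp3RbmyP1t9q"
gdown.download(url, "data/wireframe.tar.xz", quiet=False)

# Extract into data/, mirroring the `tar xf` command above.
with tarfile.open("data/wireframe.tar.xz", mode="r:xz") as tar:
    tar.extractall(path="data")
```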

#### Processing the Dataset
*Optionally*, you can pre-process the dataset from scratch (e.g., generate heat maps and perform
data augmentation) rather than downloading the processed one; an illustrative sketch of the
heat-map idea follows the commands below. **Skip** this section if you just want to use the
pre-processed dataset `wireframe.tar.xz`.

```bash
cd data
../misc/gdrive-download.sh 1BRkqyi5CKPQF6IYzj_dQxZFQl0OwbzOf wireframe_raw.tar.xz
tar xf wireframe_raw.tar.xz
rm wireframe_raw.tar.xz
cd ..
dataset/wireframe.py data/wireframe_raw data/wireframe
```
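
For intuition about the "generate heat maps" step, below is a minimal, self-contained sketch that
rasterizes ground-truth junction coordinates into a Gaussian heat map. It illustrates the general
idea only and does not reproduce `dataset/wireframe.py`; the function name, grid size, and `sigma`
are assumptions for illustration.

```python
# Illustrative sketch only -- NOT the repository's dataset/wireframe.py.
import numpy as np

def junction_heatmap(junctions, size=128, sigma=1.5):
    """Render (y, x) junction coordinates into a Gaussian heat map of shape (size, size)."""
    heatmap = np.zeros((size, size), dtype=np.float32)
    ys, xs = np.mgrid[0:size, 0:size]
    for y, x in junctions:
        g = np.exp(-((ys - y) ** 2 + (xs - x) ** 2) / (2 * sigma ** 2))
        heatmap = np.maximum(heatmap, g)  # keep the strongest response at each pixel
    return heatmap

# Example: two hypothetical junctions on a 128x128 grid.
hm = junction_heatmap([(32.0, 48.0), (90.0, 10.0)])
print(hm.shape, hm.max())  # (128, 128) 1.0
```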

### Training
To train the neural network on GPU 0 (specified by `-d 0`) with the default parameters, execute
```bash
47 changes: 14 additions & 33 deletions misc/draw-wireframe.py
@@ -1,3 +1,4 @@
#!/usr/bin/env python3
import os
import glob
import os.path as osp
@@ -10,12 +11,10 @@

from lcnn.utils import parmap

PRED = "/run/media/zyc/YZBackup/Paper/ICCV2019/LCNN/logs/190320-033347-abdf9cb-hg-q2/npz/000072000/*.npz"
GT = "data/q2/valid/*.npz"
WF = "/data/wirebase/result/wireframe/wireframe_1_rerun-baseline_0.5_0.5/2/*.mat"
AFM = "/data/wirebase/result/wireframe/afm/*.npz"
IMGS = "/data/wirebase/data/v1.1/test/*.jpg"
LL = "/data/bible/linelet/*.mat"
GT = "data/wireframe/valid/*.npz"
WF = "/data/lcnn/wirebase/result/wireframe/wireframe_1_rerun-baseline_0.5_0.5/2/*.mat"
AFM = "/data/lcnn/wirebase/result/wireframe/afm/*.npz"
IMGS = "/data/lcnn/wirebase/Wireframe/v1.1/test/*.jpg"


def imshow(im):
@@ -34,26 +33,20 @@ def imshow(im):


def main():
gts = glob.glob(GT)
gts.sort()
afm = glob.glob(AFM)
afm.sort()
wf = glob.glob(WF)
wf.sort()
img = glob.glob(IMGS)
img.sort()
ll = glob.glob(LL)
ll.sort()

prefix = "/data/wirebase/myplot/"
gts = sorted(glob.glob(GT))
afm = sorted(glob.glob(AFM))
wf = sorted(glob.glob(WF))
img = sorted(glob.glob(IMGS))

prefix = "/data/lcnn/wirebase/myplot/"
os.makedirs(osp.join(prefix, "GT"), exist_ok=True)
os.makedirs(osp.join(prefix, "LSD"), exist_ok=True)
os.makedirs(osp.join(prefix, "AFM"), exist_ok=True)
os.makedirs(osp.join(prefix, "WF"), exist_ok=True)
os.makedirs(osp.join(prefix, "LL"), exist_ok=True)

def draw(args):
i, (wf_name, gt_name, afm_name, ll_name, img_name) = args
i, (wf_name, gt_name, afm_name, img_name) = args
img = cv2.imread(img_name, 0)
lsd = cv2.createLineSegmentDetector(cv2.LSD_REFINE_ADV)
lsd_line, _, _, lsd_score = lsd.detect(img)
@@ -72,25 +65,13 @@ def draw(args):
wf_line = scipy.io.loadmat(wf_name)["lines"].reshape(-1, 2, 2)
wf_line = wf_line[:, :, ::-1]

ll_line = scipy.io.loadmat(wf_name)["lines"].reshape(-1, 2, 2)
ll_line = ll_line[:, :, ::-1]

plt.figure("LL")
imshow(img)
for a, b in ll_line - 0.5:
plt.plot([a[1], b[1]], [a[0], b[0]], color="orange", linewidth=0.5)
plt.scatter(a[1], a[0], color="#33FFFF", s=1.2, edgecolors="none", zorder=5)
plt.scatter(b[1], b[0], color="#33FFFF", s=1.2, edgecolors="none", zorder=5)
plt.savefig(osp.join(prefix, "LL", f"{i:05}"), dpi=500, bbox_inches=0)
plt.close()

plt.figure("GT")
imshow(img)
for a, b in gt_line - 0.5:
plt.plot([a[1], b[1]], [a[0], b[0]], color="orange", linewidth=0.5)
plt.scatter(a[1], a[0], color="#33FFFF", s=1.2, edgecolors="none", zorder=5)
plt.scatter(b[1], b[0], color="#33FFFF", s=1.2, edgecolors="none", zorder=5)
plt.savefig(osp.join(prefix, "GT", f"{i:05}"), dpi=500, bbox_inches=0)
plt.savefig(osp.join(prefix, "GT", f"{i:05}"), dpi=3000, bbox_inches=0)
plt.close()

plt.figure("LSD")
@@ -120,7 +101,7 @@ def draw(args):
plt.savefig(osp.join(prefix, "WF", f"{i:05}"), dpi=3000, bbox_inches=0)
plt.close()

parmap(draw, enumerate(zip(wf, gts, afm, ll, img)))
parmap(draw, enumerate(zip(wf, gts, afm, img)))


if __name__ == "__main__":
