forked from PRBonn/semantic-kitti-api
-
Notifications
You must be signed in to change notification settings - Fork 2
/
count.py
executable file
·66 lines (59 loc) · 1.9 KB
/
count.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#!/usr/bin/env python3
# This file is covered by the LICENSE file in the root of this project.
import argparse
import os
import yaml
import numpy as np
import collections
from auxiliary.laserscan import SemLaserScan
if __name__ == "__main__":
    # Command-line interface: where the dataset lives and which config to read.
    parser = argparse.ArgumentParser("./count.py")
    parser.add_argument(
        "--dataset",
        "-d",
        type=str,
        required=True,
        help="Dataset to calculate scan count. No Default",
    )
    parser.add_argument(
        "--config",
        "-c",
        type=str,
        required=False,
        default="config/semantic-kitti.yaml",
        help="Dataset config file. Defaults to %(default)s",
    )
    FLAGS, unparsed = parser.parse_known_args()

    # print summary of what we will do
    print("*" * 80)
    print("INTERFACE:")
    print("Dataset", FLAGS.dataset)
    print("Config", FLAGS.config)
    print("*" * 80)

    # Open the config file. Use a context manager so the handle is closed
    # even when parsing fails (the original leaked the open file object).
    try:
        print("Opening config file %s" % FLAGS.config)
        with open(FLAGS.config, "r") as cfg_file:
            CFG = yaml.safe_load(cfg_file)
    except Exception as e:
        print(e)
        print("Error opening yaml file.")
        quit()

    # Collect all splits (train/valid/test) so the count covers every sequence.
    sequences = CFG["split"]["train"]
    sequences.extend(CFG["split"]["valid"])
    sequences.extend(CFG["split"]["test"])
    sequences.sort()
    print("Analyzing sequences", sequences, "to count number of scans")

    # Iterate over sequences and count the scan files in each one.
    for seq in sequences:
        # Sequence directories are zero-padded to two digits (e.g. "00", "11").
        seqstr = "{0:02d}".format(int(seq))
        scan_paths = os.path.join(FLAGS.dataset, "sequences", seqstr, "velodyne")
        if not os.path.isdir(scan_paths):
            print("Sequence", seqstr, "doesn't exist! Exiting...")
            quit()
        # Recursively gather every file under the velodyne directory;
        # each file is assumed to be one scan — TODO confirm no extra files live here.
        scan_names = [
            os.path.join(dp, f)
            for dp, dn, fn in os.walk(os.path.expanduser(scan_paths))
            for f in fn
        ]
        print(seqstr, ",", len(scan_names))