
Commit cf1ceb4 (0 parents)

tiny rotated object detection with weights


42 files changed: +1548, -0 lines

bsridatta.egg-info/PKG-INFO

Lines changed: 11 additions & 0 deletions
Metadata-Version: 1.2
Name: bsridatta
Version: 0.0.1
Summary: UNKNOWN
Home-page: UNKNOWN
Author: Sri Datta Budaraju
Author-email: b.sridatta@gmail.com
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
Requires-Python: >=3.7

bsridatta.egg-info/SOURCES.txt

Lines changed: 9 additions & 0 deletions
setup.py
bsridatta.egg-info/PKG-INFO
bsridatta.egg-info/SOURCES.txt
bsridatta.egg-info/dependency_links.txt
bsridatta.egg-info/top_level.txt
src/__init__.py
src/dataloader.py
src/dataset.py
src/train.py
bsridatta.egg-info/dependency_links.txt

Lines changed: 1 addition & 0 deletions

(a single blank line)
bsridatta.egg-info/top_level.txt

Lines changed: 1 addition & 0 deletions
src

model.hdf5

18.2 MB binary file (contents not shown).

reports/FPN-summary ordered table.png

224 KB image file (preview not shown).

reports/FPN_torchsummary.txt

Lines changed: 67 additions & 0 deletions
----------------------------------------------------------------
Layer (type)    Output Shape    Param #
================================================================
Conv2d-1    [-1, 8, 200, 200]    72
BatchNorm2d-2    [-1, 8, 200, 200]    16
MaxPool2d-3    [-1, 8, 100, 100]    0
Conv2d-4    [-1, 16, 100, 100]    1,152
BatchNorm2d-5    [-1, 16, 100, 100]    32
MaxPool2d-6    [-1, 16, 50, 50]    0
Conv2d-7    [-1, 32, 25, 25]    4,608
BatchNorm2d-8    [-1, 32, 25, 25]    64
Mish-9    [-1, 32, 25, 25]    0
Conv2d-10    [-1, 32, 25, 25]    9,216
BatchNorm2d-11    [-1, 32, 25, 25]    64
Conv2d-12    [-1, 32, 25, 25]    512
BatchNorm2d-13    [-1, 32, 25, 25]    64
Mish-14    [-1, 32, 25, 25]    0
ConvBlock-15    [-1, 32, 25, 25]    0
Conv2d-16    [-1, 64, 13, 13]    18,432
BatchNorm2d-17    [-1, 64, 13, 13]    128
Mish-18    [-1, 64, 13, 13]    0
Conv2d-19    [-1, 64, 13, 13]    36,864
BatchNorm2d-20    [-1, 64, 13, 13]    128
Conv2d-21    [-1, 64, 13, 13]    2,048
BatchNorm2d-22    [-1, 64, 13, 13]    128
Mish-23    [-1, 64, 13, 13]    0
ConvBlock-24    [-1, 64, 13, 13]    0
Conv2d-25    [-1, 128, 7, 7]    73,728
BatchNorm2d-26    [-1, 128, 7, 7]    256
Mish-27    [-1, 128, 7, 7]    0
Conv2d-28    [-1, 128, 7, 7]    147,456
BatchNorm2d-29    [-1, 128, 7, 7]    256
Conv2d-30    [-1, 128, 7, 7]    8,192
BatchNorm2d-31    [-1, 128, 7, 7]    256
Mish-32    [-1, 128, 7, 7]    0
ConvBlock-33    [-1, 128, 7, 7]    0
Conv2d-34    [-1, 256, 4, 4]    294,912
BatchNorm2d-35    [-1, 256, 4, 4]    512
Mish-36    [-1, 256, 4, 4]    0
Conv2d-37    [-1, 256, 4, 4]    589,824
BatchNorm2d-38    [-1, 256, 4, 4]    512
Conv2d-39    [-1, 256, 4, 4]    32,768
BatchNorm2d-40    [-1, 256, 4, 4]    512
Mish-41    [-1, 256, 4, 4]    0
ConvBlock-42    [-1, 256, 4, 4]    0
Conv2d-43    [-1, 256, 4, 4]    65,792
Conv2d-44    [-1, 256, 7, 7]    33,024
Conv2d-45    [-1, 256, 13, 13]    16,640
Conv2d-46    [-1, 256, 25, 25]    8,448
Conv2d-47    [-1, 256, 25, 25]    590,080
AdaptiveAvgPool2d-48    [-1, 256, 1, 1]    0
AdaptiveAvgPool2d-49    [-1, 256, 1, 1]    0
Flatten-50    [-1, 256]    0
Linear-51    [-1, 1]    257
Flatten-52    [-1, 256]    0
Linear-53    [-1, 5]    1,285
================================================================
Total params: 1,938,238
Trainable params: 1,938,238
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.15
Forward/backward pass size (MB): 13.97
Params size (MB): 7.39
Estimated Total Size (MB): 21.52
----------------------------------------------------------------
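For reference, a summary like the one above is usually produced with the torchsummary package pinned in requirements.txt. The sketch below is not part of the commit: the import path src.model.FPN is hypothetical (the model file is not shown in this excerpt), and the (1, 200, 200) input size is an assumption inferred from the first Conv2d output of [-1, 8, 200, 200] and the reported input size of 0.15 MB (200 x 200 x 4 bytes).

# Minimal sketch, assuming a single-channel 200x200 input and a hypothetical
# src.model.FPN module; not taken from the commit itself.
import torch
from torchsummary import summary

from src.model import FPN  # hypothetical import path

model = FPN()
device = "cuda" if torch.cuda.is_available() else "cpu"
# torchsummary==1.5.1 prints the layer/shape/param table shown above
summary(model.to(device), input_size=(1, 200, 200), device=device)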

requirements.txt

Lines changed: 11 additions & 0 deletions
matplotlib==3.3.1
numpy==1.18.5
tqdm==4.48.2
Shapely==1.7.0
scikit_image==0.16.2
torch==1.6.0
torchvision==0.7.0
tensorflow==2.3.0
Pillow==7.2.0
pytest==6.0.2
torchsummary==1.5.1

setup.py

Lines changed: 10 additions & 0 deletions
import setuptools


setuptools.setup(
    name="bsridatta",
    version="0.0.1",
    author="Sri Datta Budaraju",
    author_email="b.sridatta@gmail.com",
    packages=setuptools.find_packages(),
    python_requires='>=3.6'
)

src/__init__.py

Whitespace-only changes.

src/callbacks/__init__.py

Lines changed: 10 additions & 0 deletions
from src.callbacks.base import CallbackList, Callback
from src.callbacks.model_checkpoint import ModelCheckpoint
from src.callbacks.logging import Logging


__all__ = [
    'CallbackList',
    'ModelCheckpoint',
    'Logging',
]

src/callbacks/base.py

Lines changed: 158 additions & 0 deletions
'''
Callback inspirations from PyTorch Lightning - https://github.com/PyTorchLightning/PyTorch-Lightning
and https://github.com/devforfu/pytorch_playground/blob/master/loop.ipynb
'''

import abc


class Callback(abc.ABC):
    def setup(self, **kwargs):
        """Called before the training procedure"""
        pass

    def teardown(self, **kwargs):
        """Called after the training procedure"""
        pass

    def on_epoch_start(self, **kwargs):
        """Called when an epoch begins"""
        pass

    def on_epoch_end(self, **kwargs):
        """Called when an epoch terminates"""
        pass

    def on_train_batch_start(self, **kwargs):
        """Called when a training step begins"""
        pass

    def on_train_batch_end(self, **kwargs):
        """Called when a training step ends"""
        pass

    def on_validation_batch_start(self, **kwargs):
        """Called when a validation step begins"""
        pass

    def on_validation_batch_end(self, **kwargs):
        """Called when a validation step ends"""
        pass

    def on_test_batch_start(self, **kwargs):
        """Called when a test batch begins"""
        pass

    def on_test_batch_end(self, **kwargs):
        """Called when a test batch ends"""
        pass

    def on_train_start(self, **kwargs):
        """Called when the training loop begins"""
        pass

    def on_train_end(self, **kwargs):
        """Called when the training loop ends"""
        pass

    def on_validation_start(self, **kwargs):
        """Called when the validation loop begins"""
        pass

    def on_validation_end(self, **kwargs):
        """Called when the validation loop ends"""
        pass

    def on_test_start(self, **kwargs):
        """Called when the test loop begins"""
        pass

    def on_test_end(self, **kwargs):
        """Called when the test loop ends"""
        pass


class CallbackList(Callback):

    def __init__(self, callbacks):
        self.callbacks = callbacks

    def setup(self, **kwargs):
        """Called before the training procedure"""
        for callback in self.callbacks:
            callback.setup(**kwargs)

    def teardown(self, **kwargs):
        """Called after the training procedure"""
        for callback in self.callbacks:
            callback.teardown(**kwargs)

    def on_epoch_start(self, **kwargs):
        """Called when an epoch begins"""
        for callback in self.callbacks:
            callback.on_epoch_start(**kwargs)

    def on_epoch_end(self, **kwargs):
        """Called when an epoch terminates"""
        for callback in self.callbacks:
            callback.on_epoch_end(**kwargs)

    def on_train_batch_start(self, **kwargs):
        """Called when a training step begins"""
        for callback in self.callbacks:
            callback.on_train_batch_start(**kwargs)

    def on_train_batch_end(self, **kwargs):
        """Called when a training step ends"""
        for callback in self.callbacks:
            callback.on_train_batch_end(**kwargs)

    def on_validation_batch_start(self, **kwargs):
        """Called when a validation step begins"""
        for callback in self.callbacks:
            callback.on_validation_batch_start(**kwargs)

    def on_validation_batch_end(self, **kwargs):
        """Called when a validation step ends"""
        for callback in self.callbacks:
            callback.on_validation_batch_end(**kwargs)

    def on_test_batch_start(self, **kwargs):
        """Called when a test batch begins"""
        for callback in self.callbacks:
            callback.on_test_batch_start(**kwargs)

    def on_test_batch_end(self, **kwargs):
        """Called when a test batch ends"""
        for callback in self.callbacks:
            callback.on_test_batch_end(**kwargs)

    def on_train_start(self, **kwargs):
        """Called when the training loop begins"""
        for callback in self.callbacks:
            callback.on_train_start(**kwargs)

    def on_train_end(self, **kwargs):
        """Called when the training loop ends"""
        for callback in self.callbacks:
            callback.on_train_end(**kwargs)

    def on_validation_start(self, **kwargs):
        """Called when the validation loop begins"""
        for callback in self.callbacks:
            callback.on_validation_start(**kwargs)

    def on_validation_end(self, **kwargs):
        """Called when the validation loop ends"""
        for callback in self.callbacks:
            callback.on_validation_end(**kwargs)

    def on_test_start(self, **kwargs):
        """Called when the test loop begins"""
        for callback in self.callbacks:
            callback.on_test_start(**kwargs)

    def on_test_end(self, **kwargs):
        """Called when the test loop ends"""
        for callback in self.callbacks:
            callback.on_test_end(**kwargs)
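The intended usage is that a training loop holds a single CallbackList and calls its hooks, which fan out to every registered callback. The actual wiring lives in src/train.py, which is not shown in this excerpt; the snippet below is an illustrative sketch only, and the PrintEpoch callback is a hypothetical example.

# Illustrative sketch (not from the commit): driving the hooks above.
from src.callbacks.base import Callback, CallbackList


class PrintEpoch(Callback):
    """Toy callback that just reports epoch boundaries."""

    def on_epoch_start(self, epoch, **kwargs):
        print(f"epoch {epoch} starting")

    def on_epoch_end(self, epoch, **kwargs):
        print(f"epoch {epoch} done")


callbacks = CallbackList([PrintEpoch()])
callbacks.setup()                        # before training
for epoch in range(2):
    callbacks.on_epoch_start(epoch=epoch)
    # ... training / validation batches would run here ...
    callbacks.on_epoch_end(epoch=epoch)
callbacks.teardown()                     # after training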

src/callbacks/logging.py

Lines changed: 52 additions & 0 deletions
from src.callbacks.base import Callback
import torch


class Logging(Callback):
    """Logging and printing metrics"""

    def setup(self, opt, model, **kwargs):
        print(
            f'[INFO]: Start training procedure using device: {opt.device}')
        # log gradients and parameters of the model during training
        if opt.use_wandb:
            opt.logger.watch(model, log='all')

    def on_train_batch_end(self, opt, batch_idx, batch, dataloader, output, l_ship, l_bbox, **kwargs):
        batch_len = len(batch['input'])
        dataset_len = len(dataloader.dataset)
        n_batches = len(dataloader)

        # print to console
        print('Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.4f}\tL_ship: {:.4f}\tL_bbox: {:.4f}'.format(
            opt.epoch, batch_idx * batch_len,
            dataset_len, 100. * batch_idx / n_batches,
            output, l_ship, l_bbox),
            end='\n')

        # log to wandb
        if opt.use_wandb:
            opt.logger.log({"train_loss": output,
                            "l_ship": l_ship,
                            "l_bbox": l_bbox})

    def on_validation_end(self, opt, output, metrics, l_ship, l_bbox, **kwargs):
        # print and log metrics and loss after the validation epoch
        print("Validation - Loss: {:.4f}\tL_ship: {:.4f}\tL_bbox: {:.4f}".format(
            output, l_ship, l_bbox), end="\t")

        for k in metrics.keys():
            print(f"{k}: {metrics[k]}", end="\t")
        if opt.use_wandb:
            opt.logger.log(metrics, commit=False)
            opt.logger.log({"val_loss": output,
                            "epoch": opt.epoch,
                            "val_l_ship": l_ship,
                            "val_l_bbox": l_bbox})
        print("")

    def on_epoch_end(self, opt, optimizer, **kwargs):
        lr = optimizer.param_groups[0]['lr']
        if opt.use_wandb:
            opt.logger.log({"LR": lr})
        print("lr @ ", lr)
