Skip to content

Commit 7441c5d

Browse files
authored
Merge pull request #1 from jeffng-or/unit-test
added unit and flow tests
2 parents d5d7529 + 7125a37 commit 7441c5d

18 files changed

+1345
-835
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,5 @@
11
utils/__pycache__
22
results
3+
*~
4+
.coverage
5+
htmlcov

__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# Make this package's directory importable so relative imports resolve.
import os
import sys

sys.path.append(os.path.dirname(os.path.realpath(__file__)))

run.py

100644100755
Lines changed: 37 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,17 @@
11
#!/usr/bin/env python3
22

3+
import os
34
import sys
45
import json
56
import argparse
7+
from pathlib import Path
68

79
from utils.class_process import Process
810
from utils.class_memory import Memory
911

1012
from utils.create_lib import create_lib
1113
from utils.create_lef import create_lef
1214
from utils.create_verilog import create_verilog
13-
#from utils.generate_verilog import generate_verilog_bb
1415

1516
################################################################################
1617
# RUN GENERATOR
@@ -21,6 +22,7 @@
2122
# found in the JSON configuration file.
2223
################################################################################
2324

25+
2426
def get_args() -> argparse.Namespace:
2527
"""
2628
Get command line arguments
@@ -34,29 +36,46 @@ def get_args() -> argparse.Namespace:
3436
)
3537
parser.add_argument("config", help="JSON configuration file")
3638
parser.add_argument(
37-
"--output_dir", action="store", help="Output directory ", required=False, default=None
39+
"--output_dir",
40+
action="store",
41+
help="Output directory ",
42+
required=False,
43+
default=None,
3844
)
3945
return parser.parse_args()
4046

4147

42-
def main ( args : argparse.Namespace):
48+
def ensure_results_dir(output_dir, memory_name):
    """
    Return the results directory for *memory_name*, creating it if needed.

    If output_dir was set on the command line, results go under
    <output_dir>/<memory_name>; otherwise under <cwd>/results/<memory_name>.
    """
    if output_dir:  # Output dir was set by command line option
        base = str(Path(output_dir).expanduser().resolve(strict=False))
        results_dir = os.path.join(base, memory_name)
    else:
        results_dir = os.path.join(os.getcwd(), "results", memory_name)
    # exist_ok avoids the race between a separate existence check and the
    # creation (and is a no-op when the directory is already there).
    os.makedirs(results_dir, exist_ok=True)
    return results_dir
57+
58+
59+
def main(args: argparse.Namespace):
    """Generate the lib, lef and verilog files for every SRAM in the config."""

    # Load the JSON configuration file; lines whose first non-blank
    # character is '#' are treated as comments and dropped before parsing.
    with open(args.config, "r") as fid:
        kept = [entry.strip() for entry in fid if not entry.strip().startswith("#")]
    json_data = json.loads("\n".join(kept))

    # Create a process object (shared by all srams)
    process = Process(json_data)

    # Go through each sram and generate the lib, lef and v files
    for sram_data in json_data["srams"]:
        memory = Memory(process, sram_data)
        results_dir = ensure_results_dir(args.output_dir, memory.name)
        create_lib(memory, results_dir)
        create_lef(memory, results_dir)
        create_verilog(memory, results_dir)
5877

5978
### Entry point
if __name__ == "__main__":
    main(get_args())

test/.gitignore

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
*~
2+
#*#
3+
.#*
4+
__pycache__
5+
results

test/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Make this package's directory importable so relative imports resolve.
import os
import sys

sys.path.append(os.path.dirname(os.path.realpath(__file__)))
3+

test/flow_test.py

Lines changed: 177 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,177 @@
1+
#!/usr/bin/env python3
2+
3+
import re
4+
import os
5+
import math
6+
import shutil
7+
import unittest
8+
import subprocess
9+
10+
11+
class FlowTest(unittest.TestCase):
12+
""" Flow test for fakeram """
13+
14+
def setUp(self):
15+
"""Sets up paths and REs to validate results"""
16+
17+
self._test_dir = os.path.abspath(os.path.dirname(__file__));
18+
self._script_dir = os.path.abspath(os.path.join(self._test_dir, ".."))
19+
self._exec = os.path.join(self._script_dir, "run.py")
20+
self._results_dir = os.path.join(self._test_dir, "results")
21+
if os.path.isdir(self._results_dir):
22+
shutil.rmtree(self._results_dir)
23+
self._macro_re = re.compile("^MACRO\s+(\S+)")
24+
self._size_re = re.compile("^\s+SIZE\s+(\S+)\s+BY\s+(\S+)")
25+
self._start_pin_re = re.compile("^\s+PIN\s+(\S+)")
26+
self._macro_name_re = re.compile("\S+_(\d+)x(\d+)")
27+
28+
def _getLefPin(self, fh, pin_name):
29+
""" Extracts data for given LEF pin and returns it in a dict """
30+
31+
pin_data = {}
32+
end_line = " END " + pin_name
33+
while True:
34+
line = fh.readline()
35+
if not line: # pragma: no cover
36+
return pin_data
37+
line = line.rstrip()
38+
if line == end_line:
39+
return pin_data
40+
elif line == " PORT":
41+
line = fh.readline()
42+
pin_data["layer"] = line.split()[1]
43+
line = fh.readline()
44+
pin_data["rect"] = list(map(float, line.split()[1:-1]))
45+
46+
def _getLefData(self, ram_file):
47+
""" Extracts data from LEF and returns it in a dict """
48+
49+
lef_data = {"pins": {}}
50+
with open(ram_file, "r") as fh:
51+
for line in fh:
52+
result = self._macro_re.match(line)
53+
if result:
54+
lef_data["macro_name"] = result.group(1)
55+
else:
56+
result = self._size_re.match(line)
57+
if result:
58+
lef_data["width"] = result.group(1)
59+
lef_data["height"] = result.group(2)
60+
else:
61+
result = self._start_pin_re.match(line)
62+
if result:
63+
pin_name = result.group(1)
64+
lef_data["pins"][pin_name] = self._getLefPin(fh, pin_name)
65+
return lef_data
66+
67+
def _checkPinShape(self, pin_name, pin_data, current_x, current_y, pin_width,
68+
layer_name):
69+
self.assertEqual(pin_data["layer"], layer_name,
70+
f"{pin_name}'s layer is not {layer_name}: {pin_data['layer']}")
71+
expected_rect = [ current_x, round(current_y, 3), current_x + pin_width, round(current_y + pin_width, 3) ]
72+
self.assertListEqual(expected_rect, pin_data["rect"],
73+
f"{pin_name}'s rect is not {expected_rect}: {pin_data['rect']}")
74+
75+
def _checkLef(self, ram_file, mem_width, mem_depth):
76+
""" Checks the LEF data against expected values for a given RAM """
77+
78+
lef_data = self._getLefData(ram_file)
79+
self.assertEqual(lef_data["macro_name"], "fakeram7_2048x39")
80+
self.assertEqual(lef_data["width"], "20.330")
81+
self.assertEqual(lef_data["height"], "166.600")
82+
pin_layer = "M4"
83+
pin_width = 0.024 # from config
84+
pin_spacing = 1.776 # config pin_pitch * calculated track count
85+
pin_group_spacing = 1.008 # config pin pitch * extra tracks
86+
read_pin_ct = write_pin_ct = mem_depth
87+
write_enable_ct = read_enable_ct = clk_ct = 1
88+
addr_pin_ct = math.log2(mem_width)
89+
power_pin_ct = 2
90+
total_pin_ct = read_pin_ct + write_pin_ct + write_enable_ct + read_enable_ct + clk_ct + addr_pin_ct + power_pin_ct
91+
self.assertEqual(len(lef_data["pins"].keys()), total_pin_ct)
92+
read_pins = [key for key in lef_data["pins"] if key.startswith("rd_out")]
93+
write_pins = [key for key in lef_data["pins"] if key.startswith("wd_in")]
94+
addr_pins = [key for key in lef_data["pins"] if key.startswith("addr_in")]
95+
self.assertEqual(len(read_pins), read_pin_ct)
96+
self.assertEqual(len(write_pins), write_pin_ct)
97+
self.assertEqual(len(addr_pins), addr_pin_ct)
98+
ct = 0
99+
current_x = 0
100+
current_y = pin_width * 2
101+
for pin_name in read_pins:
102+
self._checkPinShape(pin_name, lef_data["pins"][pin_name], current_x,
103+
current_y, pin_width, pin_layer)
104+
ct += 1
105+
current_y += pin_spacing
106+
current_y += pin_group_spacing
107+
for pin_name in write_pins:
108+
self._checkPinShape(pin_name, lef_data["pins"][pin_name], current_x,
109+
current_y, pin_width, pin_layer)
110+
ct += 1
111+
current_y += pin_spacing
112+
current_y += pin_group_spacing
113+
for pin_name in addr_pins:
114+
self._checkPinShape(pin_name, lef_data["pins"][pin_name], current_x,
115+
current_y, pin_width, pin_layer)
116+
current_y += pin_spacing
117+
current_y += pin_group_spacing
118+
self._checkPinShape("we_in", lef_data["pins"]["we_in"], current_x,
119+
current_y, pin_width, pin_layer)
120+
current_y += pin_spacing
121+
self._checkPinShape("ce_in", lef_data["pins"]["ce_in"], current_x,
122+
current_y, pin_width, pin_layer)
123+
current_y += pin_spacing
124+
self._checkPinShape("clk", lef_data["pins"]["clk"], current_x,
125+
current_y, pin_width, pin_layer)
126+
current_y += pin_spacing
127+
# Skip checking power pins
128+
129+
def _checkResultsDir(self):
130+
""" Checks that the expected RAMs were generated """
131+
expected_ram_list = [
132+
"fakeram7_2048x39",
133+
"fakeram7_256x32",
134+
"fakeram7_256x34",
135+
"fakeram7_64x21",
136+
"fakeram_256x128",
137+
"fakeram_256x64",
138+
"fakeram_32x46",
139+
"fakeram_512x8",
140+
"fakeram_64x20",
141+
"fakeram_64x22",
142+
]
143+
self.assertTrue(os.path.isdir(self._results_dir))
144+
self.assertListEqual(sorted(os.listdir(self._results_dir)),
145+
expected_ram_list)
146+
ct = 0
147+
for ram_name in expected_ram_list:
148+
lef_file = os.path.join(self._results_dir, ram_name,
149+
ram_name + ".lef")
150+
verilog_file = os.path.join(self._results_dir, ram_name,
151+
ram_name + ".v")
152+
liberty_file = os.path.join(self._results_dir, ram_name,
153+
ram_name + ".lib")
154+
self.assertTrue(os.path.exists(lef_file),
155+
f"{lef_file} doesn't exist")
156+
self.assertTrue(os.path.exists(verilog_file),
157+
f"{verilog_file} doesn't exist")
158+
self.assertTrue(os.path.exists(liberty_file),
159+
f"{liberty_file} doesn't exist")
160+
if ct == 0:
161+
results = self._macro_name_re.match(ram_name)
162+
mem_width = int(results.group(1))
163+
mem_depth = int(results.group(2))
164+
self._checkLef(lef_file, mem_width, mem_depth)
165+
ct += 1
166+
167+
def test_example_input(self):
168+
""" Tests the example input run """
169+
170+
cmd = self._exec + " " + os.path.join(self._script_dir, "example_input_file.cfg") + " --output_dir " + self._results_dir
171+
out = subprocess.run(cmd, check=True, shell=True)
172+
self.assertEqual(out.returncode, 0)
173+
self._checkResultsDir()
174+
175+
if __name__ == "__main__":
176+
unittest.main()
177+

test/macro_dim_test.py

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#!/usr/bin/env python3
"""Unit tests for the get_macro_dimensions helper in utils/area."""

import os
import sys
import unittest

# Make the project's utils package importable when run from anywhere
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "utils")))
from class_process import Process
from test_utils import TestUtils
import area


class MacroDimTest(unittest.TestCase):
    """Unit test for get_macro_dimensions method"""

    def setUp(self):
        """Sets up process object used by test methods"""

        self._process = Process(TestUtils.get_base_process_data())
        # delta for use when comparing floats
        self._delta = 0.01

    def test_macro_dim(self):
        """
        Tests basic macro dimension calculation based on three bank
        configurations
        """

        banks = [1, 2, 4]
        base_height = 663.552
        base_width = 5.054
        for num_banks in banks:
            sram_data = {
                "width": 39,
                "depth": 2048,
                "banks": num_banks,
            }
            (height, width) = area.get_macro_dimensions(self._process, sram_data)
            # Banking splits the height across banks and widens the macro
            exp_height = base_height / num_banks
            exp_width = base_width * num_banks
            self.assertAlmostEqual(height, exp_height, delta=self._delta)
            self.assertAlmostEqual(width, exp_width, delta=self._delta)

    def test_macro_dim_invalid_banks(self):
        """Tests detection that an invalid bank value was given"""

        sram_data = {
            "width": 39,
            "depth": 2048,
            "banks": 8,
        }
        # Only the raised exception matters; the return value is unused.
        with self.assertRaises(Exception):
            area.get_macro_dimensions(self._process, sram_data)


if __name__ == "__main__":
    unittest.main()

0 commit comments

Comments
 (0)