Initial version of tileinfo.

tileconn quality is not usable yet, still iterating.

Signed-off-by: Keith Rothman <537074+litghost@users.noreply.github.com>
diff --git a/fuzzers/004-tileinfo/Makefile b/fuzzers/004-tileinfo/Makefile
new file mode 100644
index 0000000..ad39d58
--- /dev/null
+++ b/fuzzers/004-tileinfo/Makefile
@@ -0,0 +1,42 @@
+
+N := 1
+BUILD_DIR = build_${URAY_PART}
+SPECIMENS := $(addprefix $(BUILD_DIR)/specimen_,$(shell seq -f '%03.0f' $(N)))
+SPECIMENS_OK := $(addsuffix /OK,$(SPECIMENS))
+MAX_VIVADO_PROCESS ?= 4
+MAX_TILES_INSTANCE ?= 300
+MAX_NODES_INSTANCE ?= 30000
+
+database: $(SPECIMENS_OK)
+	true
+
+pushdb:
+	cp $(BUILD_DIR)/output/tile_type_*.json ${URAY_FAMILY_DIR}/
+	rm -f ${URAY_FAMILY_DIR}/tile_type_*_site_type_*.json
+	cp $(BUILD_DIR)/output/site_type_*.json ${URAY_FAMILY_DIR}/
+	cp $(BUILD_DIR)/output/tileconn.json ${URAY_FAMILY_DIR}/$(URAY_PART)/
+
+$(SPECIMENS_OK):
+	bash generate.sh $(subst /OK,,$@) -p=$(MAX_VIVADO_PROCESS) -t=$(MAX_TILES_INSTANCE) -n=$(MAX_NODES_INSTANCE)
+	touch $@
+
+run:
+	rm -rf $(BUILD_DIR) run.${URAY_PART}.ok
+	$(MAKE) database
+	$(MAKE) pushdb
+	# Clean up intermediate files after successful pushdb.
+	find $(BUILD_DIR) -name "*.json5" -delete
+	touch run.${URAY_PART}.ok
+
+clean:
+	rm -rf build_* run.*.ok
+
+update_ignored_wires:
+	mkdir -p ignored_wires/${URAY_DATABASE}
+	python3 analyze_errors.py \
+		--error_nodes ${BUILD_DIR}/output/error_nodes.json \
+		--output_ignore_list | sort \
+		> ignored_wires/${URAY_DATABASE}/${URAY_PART}_ignored_wires.txt
+
+.PHONY: database pushdb run clean update_ignored_wires
+
diff --git a/fuzzers/004-tileinfo/analyze_errors.py b/fuzzers/004-tileinfo/analyze_errors.py
new file mode 100644
index 0000000..061c9e5
--- /dev/null
+++ b/fuzzers/004-tileinfo/analyze_errors.py
@@ -0,0 +1,70 @@
+import json
+import argparse
+
+
+def main():
+    parser = argparse.ArgumentParser(description="")
+
+    parser.add_argument('--error_nodes', default='output/error_nodes.json')
+    parser.add_argument('--output_ignore_list', action='store_true')
+
+    args = parser.parse_args()
+
+    with open(args.error_nodes) as f:
+        flat_error_nodes = json.load(f)
+
+    error_nodes = {}
+    for node, raw_node, generated_nodes in flat_error_nodes:
+        if node not in error_nodes:
+            error_nodes[node] = {
+                'raw_node': set(raw_node),
+                'generated_nodes': set(),
+            }
+
+        assert error_nodes[node]['raw_node'] == set(raw_node)
+        error_nodes[node]['generated_nodes'].add(
+            tuple(sorted(generated_nodes)))
+
+    ignored_wires = set()
+
+    for node, error in error_nodes.items():
+        combined_generated_nodes = set()
+        for generated_node in error['generated_nodes']:
+            combined_generated_nodes |= set(generated_node)
+
+        # Make sure there are no extra wires in the node.
+        assert error['raw_node'] == combined_generated_nodes, (node, error)
+
+        good_node = max(error['generated_nodes'], key=lambda x: len(x))
+        bad_nodes = error['generated_nodes'] - set((good_node, ))
+
+        if args.output_ignore_list:
+            for generated_node in bad_nodes:
+                for wire in generated_node:
+                    ignored_wires.add(wire)
+
+            continue
+
+        if max(len(generated_node) for generated_node in bad_nodes) > 1:
+            assert False, node
+        else:
+            not_pcie = False
+            for generated_node in bad_nodes:
+                for wire in generated_node:
+                    if not wire.startswith('PCIE'):
+                        not_pcie = True
+            if not_pcie:
+                #print(node, good_node, map(tuple, bad_nodes))
+                print(repr((node, tuple(map(tuple, bad_nodes)))))
+                pass
+            else:
+                #print(repr((node, map(tuple, bad_nodes))))
+                pass
+
+    if args.output_ignore_list:
+        for wire in ignored_wires:
+            print(wire)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/fuzzers/004-tileinfo/cleanup_site_pins.py b/fuzzers/004-tileinfo/cleanup_site_pins.py
new file mode 100644
index 0000000..abda227
--- /dev/null
+++ b/fuzzers/004-tileinfo/cleanup_site_pins.py
@@ -0,0 +1,119 @@
+""" Tool to cleanup site pins JSON dumps.
+
+This tool has two behaviors.  The first is to rename site names from global
+coordinates to site-local coordinates.  The second is to remove the tile
+prefix from node names.
+
+For example CLBLM_L_X8Y149 contains two sites named SLICE_X10Y149 and
+SLICE_X11Y149. SLICE_X10Y149 becomes X0Y0 and SLICE_X11Y149 becomes X1Y0.
+"""
+
+from __future__ import print_function
+import json
+import json5
+import re
+import sys
+import copy
+
+# All site names appear to follow the pattern <type>_X<abs coord>Y<abs coord>.
+# Generally speaking, only the tile-relative coordinates are required to
+# assemble arch defs, so we re-origin the coordinates to be relative to the tile
+# (e.g. start at X0Y0) and discard the prefix from the name.
+SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$')
+
+
+def find_origin_coordinate(sites):
+    """ Find the coordinates of each site within the tile, and then subtract the
+      smallest coordinate to re-origin them all to be relative to the tile.
+  """
+
+    if len(sites) == 0:
+        return 0, 0
+
+    def inner_():
+        for site in sites:
+            coordinate = SITE_COORDINATE_PATTERN.match(site['name'])
+            assert coordinate is not None, site
+
+            x_coord = int(coordinate.group(2))
+            y_coord = int(coordinate.group(3))
+            yield x_coord, y_coord
+
+    x_coords, y_coords = zip(*inner_())
+    min_x_coord = min(x_coords)
+    min_y_coord = min(y_coords)
+
+    return min_x_coord, min_y_coord
+
+
+def create_site_pin_to_wire_maps(tile_name, nodes):
+    """ Create a map from site_pin names to nodes.
+
+  Create a mapping from site pins to tile local wires.  For each node that is
+  attached to a site pin, there should only be 1 tile local wire.
+
+  """
+
+    # Remove tile prefix (e.g. CLBLM_L_X8Y149/) from node names.
+    # Routing resources will not have the prefix.
+    tile_prefix = tile_name + '/'
+    site_pin_to_wires = {}
+
+    for node in nodes:
+        if len(node['site_pins']) == 0:
+            continue
+
+        wire_names = [
+            wire for wire in node['wires'] if wire.startswith(tile_prefix)
+        ]
+        assert len(wire_names) == 1, (node, tile_prefix)
+
+        for site_pin in node["site_pins"]:
+            assert site_pin not in site_pin_to_wires
+            site_pin_to_wires[site_pin] = wire_names[0]
+
+    return site_pin_to_wires
+
+
+def main():
+    site_pins = json5.load(sys.stdin)
+
+    output_site_pins = {}
+    output_site_pins["tile_type"] = site_pins["tile_type"]
+    output_site_pins["sites"] = copy.deepcopy(site_pins["sites"])
+
+    site_pin_to_wires = create_site_pin_to_wire_maps(site_pins['tile_name'],
+                                                     site_pins['nodes'])
+    min_x_coord, min_y_coord = find_origin_coordinate(site_pins['sites'])
+
+    for site in output_site_pins['sites']:
+        orig_site_name = site['name']
+        coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
+
+        x_coord = int(coordinate.group(2))
+        y_coord = int(coordinate.group(3))
+        site['name'] = 'X{}Y{}'.format(x_coord - min_x_coord,
+                                       y_coord - min_y_coord)
+        site['prefix'] = coordinate.group(1)
+        site['x_coord'] = x_coord - min_x_coord
+        site['y_coord'] = y_coord - min_y_coord
+
+        for site_pin in site['site_pins']:
+            assert site_pin['name'].startswith(orig_site_name + '/')
+            if site_pin['name'] in site_pin_to_wires:
+                site_pin['wire'] = site_pin_to_wires[site_pin['name']]
+            else:
+                print(
+                    ('***WARNING***: Site pin {} for tile type {} is not connected, '
+                     'make sure all instaces of this tile type has this site_pin '
+                     'disconnected.').format(site_pin['name'],
+                                             site_pins['tile_type']),
+                    file=sys.stderr)
+
+            site_pin['name'] = site_pin['name'][len(orig_site_name) + 1:]
+
+    json.dumps(output_site_pins, indent=2, sort_keys=True)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/fuzzers/004-tileinfo/create_node_tree.py b/fuzzers/004-tileinfo/create_node_tree.py
new file mode 100644
index 0000000..ec569b0
--- /dev/null
+++ b/fuzzers/004-tileinfo/create_node_tree.py
@@ -0,0 +1,278 @@
+import argparse
+import datetime
+import progressbar
+import os.path
+import utils.lib
+import pickle
+import collections
+import json
+
+
+def build_node_index(fname):
+    node_index = {}
+    with open(fname, 'rb') as f:
+        f.seek(0, 2)
+        bytes = f.tell()
+        f.seek(0, 0)
+        with progressbar.ProgressBar(max_value=bytes) as bar:
+            end_of_line = 0
+            for l in f:
+                parts = l.decode('utf8').split(' ')
+                pip, node = parts[0:2]
+
+                if node not in node_index:
+                    node_index[node] = []
+
+                node_index[node].append(end_of_line)
+                end_of_line = f.tell()
+                bar.update(end_of_line)
+
+    return node_index
+
+
+def read_node(expected_node, wire_file, node_index):
+    with open(wire_file, 'rb') as f:
+        for index in node_index:
+            f.seek(index, 0)
+
+            parts = f.readline().decode('utf8').strip().split(' ')
+
+            pip, node = parts[0:2]
+            wires = parts[2:]
+
+            assert node == expected_node, repr((node, expected_node, index))
+
+            yield wires
+
+
+def generate_edges(graph, root, graph_nodes):
+    """ Starting from root, generate an edge in dir and insert into graph.
+
+  If the tree forks, simply insert a joins to indicate the split.
+
+  """
+    edge = [root]
+    prev_root = None
+
+    while True:
+        outbound_edges = graph_nodes[root]
+        outbound_edges -= set((prev_root, ))
+        if len(outbound_edges) > 1:
+            graph['edges'].append(edge)
+            if root not in graph['joins']:
+                graph['joins'][root] = set()
+            graph['joins'][root] |= outbound_edges
+
+            for element in graph_nodes[root]:
+                if element not in graph['joins']:
+                    graph['joins'][element] = set()
+                graph['joins'][element].add(root)
+
+            break
+        else:
+            if len(outbound_edges) == 0:
+                graph['edges'].append(edge)
+                break
+
+            next_root = tuple(outbound_edges)[0]
+            edge.append(next_root)
+            prev_root, root = root, next_root
+
+
+def create_ordered_wires_for_node(node, wires_in_node, downhill, uphill):
+    if len(wires_in_node) <= 2:
+        return {'edges': [wires_in_node], 'joins': {}}
+
+    downhill = set(tuple(l) for l in downhill)
+    uphill = set(tuple(l) for l in uphill)
+
+    roots = set()
+    all_wires = set()
+
+    for wire in downhill:
+        if len(wire) > 0:
+            roots |= set((wire[0], wire[-1]))
+            all_wires |= set(wire)
+
+    for wire in uphill:
+        if len(wire) > 0:
+            roots |= set((wire[0], wire[-1]))
+            all_wires |= set(wire)
+
+    assert len(wires_in_node) >= len(all_wires), (len(wires_in_node),
+                                                  len(all_wires))
+
+    if len(all_wires) <= 2:
+        return {'edges': tuple(all_wires), 'joins': {}}
+
+    graph_nodes = dict((wire, set()) for wire in all_wires)
+
+    for wire in all_wires:
+        for down in downhill:
+            try:
+                idx = down.index(wire)
+                if idx + 1 < len(down):
+                    graph_nodes[wire].add(down[idx + 1])
+                if idx - 1 >= 0:
+                    graph_nodes[wire].add(down[idx - 1])
+            except ValueError:
+                continue
+
+        for up in uphill:
+            try:
+                idx = up.index(wire)
+                if idx + 1 < len(up):
+                    graph_nodes[wire].add(up[idx + 1])
+                if idx - 1 >= 0:
+                    graph_nodes[wire].add(up[idx - 1])
+            except ValueError:
+                continue
+
+    graph = {'edges': [], 'joins': {}}
+
+    while len(roots) > 0:
+        root = roots.pop()
+
+        if len(graph_nodes[root]) > 0:
+            generate_edges(graph, root, graph_nodes)
+
+    # Dedup identical edges.
+    final_edges = set()
+
+    for edge in graph['edges']:
+        edge1 = tuple(edge)
+        edge2 = tuple(edge[::-1])
+
+        if edge1 > edge2:
+            final_edges.add((edge2, edge1))
+        else:
+            final_edges.add((edge1, edge2))
+
+    edges = [edge[0] for edge in final_edges]
+
+    element_index = {}
+    for edge in edges:
+        for idx, element in enumerate(edge):
+            if element not in element_index:
+                element_index[element] = []
+            element_index[element].append((idx, edge))
+
+    new_edges = []
+    for edge in edges:
+        starts = element_index[edge[0]]
+        ends = element_index[edge[-1]]
+
+        found_any = False
+        for start in starts:
+            start_idx, other_edge = start
+            if other_edge is edge:
+                continue
+
+            for end in ends:
+                if other_edge is not end[1]:
+                    continue
+
+                found_any = True
+                end_idx, _ = end
+                # check if the interior elements are the same.
+                if start_idx > end_idx:
+                    step = -1
+                else:
+                    step = 1
+
+                other_edge_slice = slice(
+                    start_idx, end_idx + step if end_idx + step >= 0 else None,
+                    step)
+                if edge != other_edge[other_edge_slice]:
+                    new_edges.append(edge)
+
+        if not found_any:
+            new_edges.append(edge)
+
+    output = {
+        'edges':
+        new_edges,
+        'joins':
+        dict((key, tuple(value)) for key, value in graph['joins'].items()),
+        'wires':
+        wires_in_node,
+    }
+
+    all_wires_in_output = set()
+    for edge in output['edges']:
+        all_wires_in_output |= set(edge)
+
+    for element in output['joins']:
+        all_wires_in_output.add(element)
+
+    return output
+
+
+def main():
+    parser = argparse.ArgumentParser(description="")
+    parser.add_argument('--dump_all_root_dir', required=True)
+    parser.add_argument('--ordered_wires_root_dir', required=True)
+    parser.add_argument('--output_dir', required=True)
+
+    args = parser.parse_args()
+
+    downhill_wires = os.path.join(args.ordered_wires_root_dir,
+                                  'downhill_wires.txt')
+    uphill_wires = os.path.join(args.ordered_wires_root_dir,
+                                'uphill_wires.txt')
+
+    assert os.path.exists(downhill_wires)
+    assert os.path.exists(uphill_wires)
+
+    print('{} Reading root.csv'.format(datetime.datetime.now()))
+    tiles, nodes = utils.lib.read_root_csv(args.dump_all_root_dir)
+
+    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
+    node_lookup = utils.lib.NodeLookup()
+    node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
+    if os.path.exists(node_lookup_file):
+        node_lookup.load_from_file(node_lookup_file)
+    else:
+        node_lookup.load_from_root_csv(nodes)
+        node_lookup.save_to_file(node_lookup_file)
+
+    wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle')
+    if os.path.exists(wire_index_file):
+        print('{} Reading wire<->node index'.format(datetime.datetime.now()))
+        with open(wire_index_file, 'rb') as f:
+            wire_index = pickle.load(f)
+
+        downhill_wire_node_index = wire_index['downhill']
+        uphill_wire_node_index = wire_index['uphill']
+    else:
+        print('{} Creating wire<->node index'.format(datetime.datetime.now()))
+        downhill_wire_node_index = build_node_index(downhill_wires)
+        uphill_wire_node_index = build_node_index(uphill_wires)
+
+        with open(wire_index_file, 'wb') as f:
+            pickle.dump({
+                'downhill': downhill_wire_node_index,
+                'uphill': uphill_wire_node_index,
+            }, f)
+
+    print('{} Creating node tree'.format(datetime.datetime.now()))
+    nodes = collections.OrderedDict()
+    for node in progressbar.progressbar(sorted(node_lookup.nodes)):
+        nodes[node] = create_ordered_wires_for_node(
+            node, tuple(wire['wire'] for wire in node_lookup.nodes[node]),
+            tuple(
+                read_node(
+                    node, downhill_wires, downhill_wire_node_index[node]
+                    if node in downhill_wire_node_index else [])),
+            tuple(
+                read_node(
+                    node, uphill_wires, uphill_wire_node_index[node]
+                    if node in uphill_wire_node_index else [])))
+
+    print('{} Writing node tree'.format(datetime.datetime.now()))
+    with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f:
+        json.dump(nodes, f, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/fuzzers/004-tileinfo/generate.sh b/fuzzers/004-tileinfo/generate.sh
new file mode 100644
index 0000000..4773d0c
--- /dev/null
+++ b/fuzzers/004-tileinfo/generate.sh
@@ -0,0 +1,7 @@
+#!/bin/bash -x
+
+source ${URAY_GENHEADER}
+
+python3 $FUZDIR/run_fuzzer.py $2 $3 $4
+
+cd $FUZDIR && ./generate_after_dump.sh
diff --git a/fuzzers/004-tileinfo/generate_after_dump.sh b/fuzzers/004-tileinfo/generate_after_dump.sh
new file mode 100755
index 0000000..b8f96b4
--- /dev/null
+++ b/fuzzers/004-tileinfo/generate_after_dump.sh
@@ -0,0 +1,21 @@
+#!/bin/bash -xe
+
+# Limit generate_grid.py to 10 parallel jobs by default (roughly ~50 GiB of RAM); override via MAX_GRID_CPU.
+export DEFAULT_MAX_GRID_CPU=10
+
+export BUILD_DIR=build_${URAY_PART}
+rm -rf ${BUILD_DIR}/output
+mkdir -p ${BUILD_DIR}/output
+python3 reduce_tile_types.py \
+  --root_dir ${BUILD_DIR}/specimen_001/ \
+  --output_dir ${BUILD_DIR}/output
+python3 create_node_tree.py \
+  --dump_all_root_dir ${BUILD_DIR}/specimen_001/ \
+  --ordered_wires_root_dir ../003-ordered_wires/${BUILD_DIR}/specimen_001/ \
+  --output_dir ${BUILD_DIR}/output
+python3 reduce_site_types.py --output_dir ${BUILD_DIR}/output
+python3 generate_grid.py \
+  --root_dir ${BUILD_DIR}/specimen_001/ \
+  --output_dir ${BUILD_DIR}/output \
+  --ignored_wires ignored_wires/${URAY_DATABASE}/${URAY_PART}_ignored_wires.txt \
+  --max_cpu=${MAX_GRID_CPU:-${DEFAULT_MAX_GRID_CPU}}
diff --git a/fuzzers/004-tileinfo/generate_grid.py b/fuzzers/004-tileinfo/generate_grid.py
new file mode 100644
index 0000000..911c30f
--- /dev/null
+++ b/fuzzers/004-tileinfo/generate_grid.py
@@ -0,0 +1,928 @@
+""" Generate grid from database dump """
+
+from __future__ import print_function
+import argparse
+import pyjson5 as json5
+import multiprocessing
+import progressbar
+import os.path
+import json
+import datetime
+import pickle
+import sys
+import copy
+
+from utils import util, lib
+from utils.xjson import extract_numbers
+
+
+def get_tile_grid_info(fname):
+    with open(fname, 'r') as f:
+        tile = json5.load(f)
+
+    tile_type = tile['type']
+
+    return {
+        tile['tile']: {
+            'type': tile_type,
+            'grid_x': tile['x'],
+            'grid_y': tile['y'],
+            'sites': dict(
+                (site['site'], site['type']) for site in tile['sites']),
+            'wires': set((wire['wire'] for wire in tile['wires']))
+        },
+    }
+
+
+def read_json5(fname):
+    with open(fname, 'r') as f:
+        return json5.load(f)
+
+
+def is_edge_shared(edge1, edge2):
+    """ Returns true if edge1 or edge2 overlap
+
+  >>> is_edge_shared((0, 1), (0, 1))
+  True
+  >>> is_edge_shared((0, 2), (0, 1))
+  True
+  >>> is_edge_shared((0, 1), (0, 2))
+  True
+  >>> is_edge_shared((1, 2), (0, 3))
+  True
+  >>> is_edge_shared((0, 3), (1, 2))
+  True
+  >>> is_edge_shared((1, 2), (0, 2))
+  True
+  >>> is_edge_shared((0, 2), (1, 2))
+  True
+  >>> is_edge_shared((0, 2), (1, 3))
+  True
+  >>> is_edge_shared((1, 3), (0, 2))
+  True
+  >>> is_edge_shared((0, 1), (1, 2))
+  False
+  >>> is_edge_shared((1, 2), (0, 1))
+  False
+  >>> is_edge_shared((0, 1), (2, 3))
+  False
+  >>> is_edge_shared((2, 3), (0, 1))
+  False
+  """
+    assert edge1[0] < edge1[1], edge1
+    assert edge2[0] < edge2[1], edge2
+
+    if edge1[0] <= edge2[0]:
+        return edge2[0] < edge1[1]
+    else:
+        return edge1[0] < edge2[1]
+
+
+def share_edge(a, b):
+    """ Returns true if box defined by a and b share any edge.
+
+  Box is defined as (x-min, y-min, x-max, y-max).
+
+  >>> share_edge((0, 0, 1, 1), (1, 0, 2, 1))
+  True
+  >>> share_edge((1, 0, 2, 1), (0, 0, 1, 1))
+  True
+  >>> share_edge((0, 0, 1, 1), (0, 1, 1, 2))
+  True
+  >>> share_edge((0, 1, 1, 2), (0, 0, 1, 1))
+  True
+  >>> share_edge((0, 0, 1, 3), (1, 0, 2, 1))
+  True
+  >>> share_edge((1, 0, 2, 1), (0, 0, 1, 3))
+  True
+  >>> share_edge((0, 0, 3, 1), (0, 1, 1, 2))
+  True
+  >>> share_edge((0, 1, 1, 2), (0, 0, 3, 1))
+  True
+  >>> share_edge((0, 0, 1, 1), (1, 1, 2, 2))
+  False
+  >>> share_edge((1, 1, 2, 2), (0, 0, 1, 1))
+  False
+  >>> share_edge((0, 0, 1, 3), (1, 3, 2, 4))
+  False
+  >>> share_edge((0, 0, 1, 3), (1, 2, 2, 4))
+  True
+  """
+
+    a_x_min, a_y_min, a_x_max, a_y_max = a
+    b_x_min, b_y_min, b_x_max, b_y_max = b
+
+    if a_x_min == b_x_max or a_x_max == b_x_min:
+        return is_edge_shared((a_y_min, a_y_max), (b_y_min, b_y_max))
+    if a_y_min == b_y_max or a_y_max == b_y_min:
+        return is_edge_shared((a_x_min, a_x_max), (b_x_min, b_x_max))
+
+
+def next_wire_in_dimension(wire1, tile1, wire2, tile2, tiles, x_wires, y_wires,
+                           wire_map, wires_in_node):
+    """ next_wire_in_dimension returns true if tile1 and tile2 are in the same
+  row and column, and must be adjacent.
+  """
+    tile1_info = tiles[tile1]
+    tile2_info = tiles[tile2]
+
+    tile1_x = tile1_info['grid_x']
+    tile2_x = tile2_info['grid_x']
+    tile1_y = tile1_info['grid_y']
+    tile2_y = tile2_info['grid_y']
+
+    # All wires are in the same row or column, or each wire lies in its own
+    # row or column.
+    if len(y_wires) == 1 or len(x_wires) == len(wires_in_node) or abs(
+            tile1_y - tile2_y) == 0:
+        ordered_wires = sorted(x_wires.keys())
+
+        idx1 = ordered_wires.index(tile1_x)
+        idx2 = ordered_wires.index(tile2_x)
+
+        if len(x_wires[tile1_x]) == 1 and len(x_wires[tile2_x]) == 1:
+            return abs(idx1 - idx2) == 1
+
+    if len(x_wires) == 1 or len(y_wires) == len(wires_in_node) or abs(
+            tile1_x - tile2_x) == 0:
+        ordered_wires = sorted(y_wires.keys())
+
+        idx1 = ordered_wires.index(tile1_y)
+        idx2 = ordered_wires.index(tile2_y)
+
+        if len(y_wires[tile1_y]) == 1 and len(y_wires[tile2_y]) == 1:
+            return abs(idx1 - idx2) == 1
+
+    return None
+
+
+def only_wire(tile1, tile2, tiles, x_wires, y_wires):
+    """ only_wire returns true if tile1 and tile2 only have 1 wire in their respective x or y dimension.
+  """
+    tile1_info = tiles[tile1]
+    tile2_info = tiles[tile2]
+
+    tile1_x = tile1_info['grid_x']
+    tile2_x = tile2_info['grid_x']
+
+    tiles_x_adjacent = abs(tile1_x - tile2_x) == 1
+    if tiles_x_adjacent and len(x_wires[tile1_x]) == 1 and len(
+            x_wires[tile2_x]) == 1:
+        return True
+
+    tile1_y = tile1_info['grid_y']
+    tile2_y = tile2_info['grid_y']
+
+    tiles_y_adjacent = abs(tile1_y - tile2_y) == 1
+    if tiles_y_adjacent and len(y_wires[tile1_y]) == 1 and len(
+            y_wires[tile2_y]) == 1:
+        return True
+
+    return None
+
+
+def is_directly_connected(node, node_tree, wire1, wire2):
+    if 'wires' in node_tree:
+        node_tree_wires = node_tree['wires']
+    else:
+        if len(node_tree['edges']) == 1 and len(node_tree['joins']) == 0:
+            node_tree_wires = node_tree['edges'][0]
+        else:
+            return None
+
+    if wire1 not in node_tree_wires:
+        return None
+    if wire2 not in node_tree_wires:
+        return None
+
+    # Is there an edge that has wire1 next to wire2?
+    for edge in node_tree['edges']:
+        idx1 = None
+        idx2 = None
+        try:
+            idx1 = edge.index(wire1)
+        except ValueError:
+            pass
+
+        try:
+            idx2 = edge.index(wire2)
+        except ValueError:
+            pass
+
+        if idx1 is not None and idx2 is not None:
+            return abs(idx1 - idx2) == 1
+
+        if idx1 is not None and (idx1 != 0 and idx1 != len(edge) - 1):
+            return False
+
+        if idx2 is not None and (idx2 != 0 and idx2 != len(edge) - 1):
+            return False
+
+    # Is there a join of nodes between wire1 and wire2?
+    if wire1 in node_tree['joins']:
+        return wire2 in node_tree['joins'][wire1]
+
+    if wire2 in node_tree['joins']:
+        assert wire1 not in node_tree['joins'][wire2]
+
+    return None
+
+
+def is_connected(wire1, tile1, wire2, tile2, node, wires_in_tiles, wire_map,
+                 node_tree, tiles, x_wires, y_wires, wires_in_node):
+    """ Check if two wires are directly connected. """
+
+    next_wire_in_dim = next_wire_in_dimension(wire1, tile1, wire2, tile2,
+                                              tiles, x_wires, y_wires,
+                                              wire_map, wires_in_node)
+    if next_wire_in_dim is not None:
+        return next_wire_in_dim
+
+    # Because there are multiple possible wire connections between these two
+    # tiles, consult the node_tree to determine if the two wires are actually connected.
+    #
+    # Warning: The node_tree is incomplete because it is not known how to extract
+    # ordered wire information from the node.
+    #
+    # Example node CLK_BUFG_REBUF_X60Y142/CLK_BUFG_REBUF_R_CK_GCLK0_BOT
+    # It does not appear to be possible to get ordered wire connection information
+    # for the first two wires connected to PIP
+    # CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP
+    #
+    # However, it happens to be that these wires are the only wires in their
+    # tiles, so the earlier "only wires in tile" check will pass.
+
+    connected = is_directly_connected(node['node'], node_tree[node['node']],
+                                      wire1, wire2)
+    if connected is not None:
+        return connected
+
+    is_only_wire = only_wire(tile1, tile2, tiles, x_wires, y_wires)
+    if is_only_wire is not None:
+        return is_only_wire
+
+    # The node_tree didn't specify these wires, and the wires are not
+    # unambiguously connected.
+    return False
+
+
+def edge_overlap(low1, high1, low2, high2):
+    """ Returns true if two lines have >0 overlap
+
+    >>> edge_overlap(0, 1, 1, 2)
+    False
+    >>> edge_overlap(0, 2, 1, 2)
+    True
+    >>> edge_overlap(1, 2, 1, 2)
+    True
+    >>> edge_overlap(1, 2, 0, 1)
+    False
+    >>> edge_overlap(1, 2, 0, 2)
+    True
+    >>> edge_overlap(0, 1, 0, 1)
+    True
+    """
+    if low1 < low2:
+        return low2 < high1
+    else:
+        return low1 < high2
+
+def box_share_edge(box1, box2):
+    """ Return true if the two boxes share any edge.
+
+    >>> box_share_edge(((0, 1), (0, 1)), ((0, 1), (1, 2)))
+    True
+    >>> box_share_edge(((0, 1), (0, 1)), ((1, 2), (1, 2)))
+    False
+    >>> box_share_edge(((0, 1), (0, 3)), ((1, 2), (2, 5)))
+    True
+    >>> box_share_edge(((0, 1), (0, 3)), ((1, 2), (3, 6)))
+    False
+    >>> box_share_edge(((0, 3), (0, 1)), ((2, 5), (1, 2)))
+    True
+    >>> box_share_edge(((0, 3), (0, 1)), ((3, 6), (1, 2)))
+    False
+    >>> box_share_edge(((0, 3), (0, 3)), ((0, 3), (3, 6)))
+    True
+    >>> box_share_edge(((0, 3), (0, 3)), ((3, 6), (0, 3)))
+    True
+    >>> box_share_edge(((0, 3), (0, 3)), ((3, 6), (3, 6)))
+    False
+
+    """
+    ((box1_xlow, box1_xhigh), (box1_ylow, box1_yhigh)) = box1
+    ((box2_xlow, box2_xhigh), (box2_ylow, box2_yhigh)) = box2
+
+    if box1_xlow == box2_xhigh or box2_xlow == box1_xhigh:
+        # box 1 left edge may touch box 2 right edge
+        #  or
+        # box 2 left edge may touch box 1 right edge
+        if edge_overlap(box1_ylow, box1_yhigh, box2_ylow, box2_yhigh):
+            return True
+
+    if box1_ylow == box2_yhigh or box2_ylow == box1_yhigh:
+        # box 1 bottom edge may touch box 2 top edge
+        #  or
+        # box 2 bottom edge may touch box 1 top edge
+        if edge_overlap(box1_xlow, box1_xhigh, box2_xlow, box2_xhigh):
+            return True
+
+    return False
+
+
+def tiles_are_adjcent(tile1, tile2, tile_type_sizes):
+    width1, height1 = tile_type_sizes[tile1['type']]
+    tile1_xlow = tile1['grid_x']
+    tile1_ylow = tile1['grid_y'] - height1
+    tile1_xhigh = tile1['grid_x'] + width1
+    tile1_yhigh = tile1['grid_y']
+
+    width2, height2 = tile_type_sizes[tile2['type']]
+    tile2_xlow = tile2['grid_x']
+    tile2_ylow = tile2['grid_y'] - height2
+    tile2_xhigh = tile2['grid_x'] + width2
+    tile2_yhigh = tile2['grid_y']
+
+    return box_share_edge(
+            ((tile1_xlow, tile1_xhigh), (tile1_ylow, tile1_yhigh)),
+            ((tile2_xlow, tile2_xhigh), (tile2_ylow, tile2_yhigh)),
+            )
+
+
+def process_node(tileconn, key_history, node, wire_map, node_tree, tiles, tile_type_sizes):
+    wires = [wire['wire'] for wire in node['wires']]
+
+    wires_in_tiles = {}
+    x_wires = {}
+    y_wires = {}
+    for wire in wires:
+        wire_info = wire_map[wire]
+
+        if wire_info['tile'] not in wires_in_tiles:
+            wires_in_tiles[wire_info['tile']] = []
+        wires_in_tiles[wire_info['tile']].append(wire)
+
+        grid_x = tiles[wire_info['tile']]['grid_x']
+        if grid_x not in x_wires:
+            x_wires[grid_x] = []
+        x_wires[grid_x].append(wire)
+
+        grid_y = tiles[wire_info['tile']]['grid_y']
+        if grid_y not in y_wires:
+            y_wires[grid_y] = []
+        y_wires[grid_y].append(wire)
+
+    if len(wires) == 2:
+        wire1 = wires[0]
+        wire_info1 = wire_map[wire1]
+        wire2 = wires[1]
+        wire_info2 = wire_map[wire2]
+        update_tile_conn(tileconn, key_history, wire1, wire_info1, wire2,
+                         wire_info2, tiles)
+        return
+
+    for idx, wire1 in enumerate(wires):
+        wire_info1 = wire_map[wire1]
+        for wire2 in wires[idx + 1:]:
+            wire_info2 = wire_map[wire2]
+
+            tile1 = tiles[wire_info1['tile']]
+            tile2 = tiles[wire_info2['tile']]
+
+            if not tiles_are_adjcent(tile1, tile2, tile_type_sizes):
+                continue
+
+            if not is_connected(wire1, wire_info1['tile'], wire2,
+                                wire_info2['tile'], node, wires_in_tiles,
+                                wire_map, node_tree, tiles, x_wires, y_wires,
+                                wires):
+                continue
+
+            update_tile_conn(tileconn, key_history, wire1, wire_info1, wire2,
+                             wire_info2, tiles)
+
+
+def update_tile_conn(tileconn, key_history, wirename1, wire1, wirename2, wire2,
+                     tiles):
+    # Ensure that (wire1, wire2) is sorted, so we can easily check if a connection
+    # already exists.
+
+    tile1 = tiles[wire1['tile']]
+    tile2 = tiles[wire2['tile']]
+    if ((wire1['type'], wire1['shortname'], tile1['grid_x'], tile1['grid_y']) >
+        (wire2['type'], wire2['shortname'], tile2['grid_x'], tile2['grid_y'])):
+        wire1, tile1, wire2, tile2 = wire2, tile2, wire1, tile1
+
+    tileconn.append({
+        "grid_deltas": [
+            tile2['grid_x'] - tile1['grid_x'],
+            tile2['grid_y'] - tile1['grid_y'],
+        ],
+        "tile_types": [
+            tile1['type'],
+            tile2['type'],
+        ],
+        "wire_pair": [
+            wire1['shortname'],
+            wire2['shortname'],
+        ],
+    })
+
+
+def flatten_tile_conn(tileconn):
+    """ Convert tileconn that is key'd to identify specific wire pairs between tiles
+  key (tile1_type, wire1_name, tile2_type, wire2_name) to flat tile connect list
+  that relates tile types and relative coordinates and a full list of wires to
+  connect. """
+    flat_tileconn = {}
+
+    for conn in tileconn:
+        key = (tuple(conn['tile_types']), tuple(conn['grid_deltas']))
+
+        if key not in flat_tileconn:
+            flat_tileconn[key] = {
+                'tile_types': conn['tile_types'],
+                'grid_deltas': conn['grid_deltas'],
+                'wire_pairs': set()
+            }
+
+        flat_tileconn[key]['wire_pairs'].add(tuple(conn['wire_pair']))
+
+    def inner():
+        for output in flat_tileconn.values():
+            yield {
+                'tile_types': output['tile_types'],
+                'grid_deltas': output['grid_deltas'],
+                'wire_pairs': tuple(output['wire_pairs']),
+            }
+
+    return tuple(inner())
+
+
+def is_tile_type(tiles, coord_to_tile, coord, tile_type):
+    if coord not in coord_to_tile:
+        return False
+
+    target_tile = tiles[coord_to_tile[coord]]
+    return target_tile['type'] == tile_type
+
+
+def get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles):
+    """ Yields (tile_coord, wire) for each wire that should be connected to specified wire. """
+    pair = conn['wire_pairs'][idx]
+    wire_tile_type = wire_info['type']
+    tile_types = conn['tile_types']
+    shortname = wire_info['shortname']
+    grid_deltas = conn['grid_deltas']
+
+    # The wire may match either (or both, when tile types and names are
+    # symmetric) ends of the connection pattern.
+    wire1 = tile_types[0] == wire_tile_type and shortname == pair[0]
+    wire2 = tile_types[1] == wire_tile_type and shortname == pair[1]
+    assert wire1 or wire2, (wire, conn)
+
+    tile_of_wire = wire_info['tile']
+    start_coord_x = tiles[tile_of_wire]['grid_x']
+    start_coord_y = tiles[tile_of_wire]['grid_y']
+    if wire1:
+        # Wire is the first end of the pair; its partner lies at +grid_deltas.
+        target_coord_x = start_coord_x + grid_deltas[0]
+        target_coord_y = start_coord_y + grid_deltas[1]
+        target_tile_type = tile_types[1]
+
+        target_wire = pair[1]
+        target_tile = (target_coord_x, target_coord_y)
+
+        # Only yield if the target location holds a tile of the expected type.
+        if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type):
+            yield target_tile, target_wire
+
+    if wire2:
+        # Wire is the second end of the pair; its partner lies at -grid_deltas.
+        target_coord_x = start_coord_x - grid_deltas[0]
+        target_coord_y = start_coord_y - grid_deltas[1]
+        target_tile_type = tile_types[0]
+
+        target_wire = pair[0]
+        target_tile = (target_coord_x, target_coord_y)
+
+        if is_tile_type(tiles, coord_to_tile, target_tile, target_tile_type):
+            yield target_tile, target_wire
+
+
+def make_connection(wire_nodes, wire1, wire2):
+    if wire_nodes[wire1] is wire_nodes[wire2]:
+        assert wire1 in wire_nodes[wire1]
+        assert wire2 in wire_nodes[wire2]
+        return
+
+    new_node = wire_nodes[wire1] | wire_nodes[wire2]
+
+    for wire in new_node:
+        wire_nodes[wire] = new_node
+
+
+def create_coord_to_tile(tiles):
+    coord_to_tile = {}
+    for tile, tileinfo in tiles.items():
+        coord_to_tile[(tileinfo['grid_x'], tileinfo['grid_y'])] = tile
+
+    return coord_to_tile
+
+
+def connect_wires(tiles, tileconn, wire_map):
+    """ Connect individual wires into groups of wires called nodes.
+
+    Returns a tuple of nodes, each node a tuple of full wire names.
+    """
+
+    # Initialize all nodes to originally only contain the wire by itself.
+    wire_nodes = {}
+    for wire in wire_map:
+        wire_nodes[wire] = set([wire])
+
+    # Index tileconn entries by (tile_type, shortname) for both ends of each
+    # wire pair, so candidate connections can be looked up per wire.
+    wire_connection_map = {}
+    for conn in tileconn:
+        for idx, (wire1, wire2) in enumerate(conn['wire_pairs']):
+            key1 = (conn['tile_types'][0], wire1)
+            if key1 not in wire_connection_map:
+                wire_connection_map[key1] = []
+            wire_connection_map[key1].append((conn, idx))
+
+            key2 = (conn['tile_types'][1], wire2)
+            if key2 not in wire_connection_map:
+                wire_connection_map[key2] = []
+            wire_connection_map[key2].append((conn, idx))
+
+    coord_to_tile = create_coord_to_tile(tiles)
+
+    for wire, wire_info in progressbar.progressbar(wire_map.items()):
+        key = (wire_info['type'], wire_info['shortname'])
+        if key not in wire_connection_map:
+            continue
+
+        for conn, idx in wire_connection_map[key]:
+            for target_tile, target_wire in get_connections(
+                    wire, wire_info, conn, idx, coord_to_tile, tiles):
+
+                # Sanity check that the target wire resolves back to the
+                # same tile/shortname recorded in wire_map.
+                full_wire_name = coord_to_tile[target_tile] + '/' + target_wire
+                assert wire_map[full_wire_name]['shortname'] == target_wire, (
+                    target_tile, target_wire, wire, conn)
+                assert wire_map[full_wire_name]['tile'] == coord_to_tile[
+                    target_tile], (wire_map[full_wire_name]['tile'],
+                                   coord_to_tile[target_tile])
+
+                make_connection(wire_nodes, wire, full_wire_name)
+
+    # Find unique nodes
+    nodes = {}
+    for node in wire_nodes.values():
+        nodes[id(node)] = node
+
+    # Flatten to list of lists.
+    return tuple(tuple(node) for node in nodes.values())
+
+
+def generate_tilegrid(pool, tiles):
+    """ Build the tile grid and wire map from raw tile dumps.
+
+    tiles: dict of tile_type -> list of tile dump inputs, processed in
+    parallel via ``pool``.  Returns (grid, wire_map) where wire_map maps a
+    full wire name to its tile name, tile type and tile-relative shortname.
+    """
+    wire_map = {}
+
+    grid = {}
+
+    num_tiles = 0
+    for tile_type in tiles:
+        num_tiles += len(tiles[tile_type])
+
+    idx = 0
+    with progressbar.ProgressBar(max_value=num_tiles) as bar:
+        for tile_type in tiles:
+            for tile in pool.imap_unordered(
+                    get_tile_grid_info,
+                    tiles[tile_type],
+                    chunksize=20,
+            ):
+                bar.update(idx)
+
+                # Each worker returns a single-entry dict {tilename: info}.
+                assert len(tile) == 1, tile
+                tilename = tuple(tile.keys())[0]
+
+                for wire in tile[tilename]['wires']:
+                    # Full wire names must be globally unique and prefixed
+                    # with their tile name.
+                    assert wire not in wire_map, (wire, wire_map)
+                    assert wire.startswith(tilename + '/'), (wire, tilename)
+
+                    wire_map[wire] = {
+                        'tile': tilename,
+                        'type': tile[tilename]['type'],
+                        'shortname': wire[len(tilename) + 1:],
+                    }
+
+                # Wires live in wire_map only; drop them from the grid entry.
+                del tile[tilename]['wires']
+                grid.update(tile)
+
+                idx += 1
+                bar.update(idx)
+
+    return grid, wire_map
+
+
+def generate_tileconn(pool, node_tree, nodes, wire_map, grid, tile_type_sizes):
+    """ Build the flattened tile connection list from raw node dumps.
+
+    Each node JSON5 file is parsed in parallel and fed through process_node,
+    then the per-wire-pair results are flattened.  Returns (tileconn,
+    raw_node_data) where raw_node_data is the list of parsed node dicts.
+    """
+    tileconn = []
+    key_history = {}
+    raw_node_data = []
+    with progressbar.ProgressBar(max_value=len(nodes)) as bar:
+        for idx, node in enumerate(
+                pool.imap_unordered(
+                    read_json5,
+                    nodes,
+                    chunksize=20,
+                )):
+            bar.update(idx)
+            raw_node_data.append(node)
+            process_node(tileconn, key_history, node, wire_map, node_tree,
+                         grid, tile_type_sizes)
+            bar.update(idx + 1)
+
+    tileconn = flatten_tile_conn(tileconn)
+
+    return tileconn, raw_node_data
+
+
+def max_size_for_tile(tile, grid, tile_by_loc, rclk_rows):
+    """ Guess maximum size for a tile. """
+    tile_type = grid[tile]['type']
+    if tile_type== 'NULL':
+        return (1, 1)
+
+    # Pos X, Neg Y
+    base_grid_x = grid[tile]['grid_x']
+    base_grid_y = grid[tile]['grid_y']
+
+    # Walk up X
+    grid_x = base_grid_x
+    grid_y = base_grid_y
+    while True:
+        try:
+            next_tile = tile_by_loc[(grid_x+1, grid_y)]
+        except KeyError:
+            break
+
+        if grid[next_tile]['type'] != 'NULL':
+            break
+
+        grid_x += 1
+
+    max_grid_x = grid_x
+
+    # Walk down Y
+    grid_x = base_grid_x
+    grid_y = base_grid_y
+    while True:
+        # Most tiles don't control the RCLK row, but a handful of tiles do!
+        if grid_y-1 in rclk_rows and tile_type not in [
+                "CFRM_AMS_CFGIO",
+                "PSS_ALTO",
+                "CFG_CONFIG",
+                ]:
+            break
+
+        try:
+            next_tile = tile_by_loc[(grid_x, grid_y-1)]
+        except KeyError:
+            break
+
+        if grid[next_tile]['type'] != 'NULL':
+            break
+
+        grid_y -= 1
+
+    max_grid_y = grid_y
+
+    return (max_grid_x-base_grid_x+1), (base_grid_y-max_grid_y+1)
+
+
+def generate_tile_type_sizes(grid):
+    """ Generate tile sizes for the given grid using max_size_for_tile. """
+    tile_type_sizes = {}
+
+    tile_by_loc = {}
+
+    rclk_rows = set()
+
+    for tile, gridinfo in grid.items():
+        key = gridinfo['grid_x'], gridinfo['grid_y']
+        assert key not in tile_by_loc
+        tile_by_loc[key] = tile
+
+        if gridinfo['type'] in ['RCLK_INT_R', 'RCLK_INT_L']:
+            rclk_rows.add(gridinfo['grid_y'])
+
+    for tile, gridinfo in grid.items():
+        size_x, size_y = max_size_for_tile(tile, grid, tile_by_loc, rclk_rows)
+        if gridinfo['type'] not in tile_type_sizes:
+            tile_type_sizes[gridinfo['type']] = (size_x, size_y)
+        else:
+            old_size_x, old_size_y = tile_type_sizes[gridinfo['type']]
+            tile_type_sizes[gridinfo['type']] = (
+                    min(size_x, old_size_x),
+                    min(size_y, old_size_y),
+                    )
+
+    return tile_type_sizes
+
+
+def commit_tile_type(grid, tile_type, size):
+    """ First verify that expanding the specified tile type by the size
+    specified works, then modify the tile grid to reflect the expanded sizes.
+    """
+    gridinfo_by_loc = {}
+    locs = []
+    for tile in grid:
+        gridinfo = grid[tile]
+        key = gridinfo['grid_x'], gridinfo['grid_y']
+        assert key not in gridinfo_by_loc
+        gridinfo_by_loc[key] = gridinfo
+
+        if gridinfo['type'] == tile_type:
+            locs.append(key)
+
+    for loc in locs:
+        for dx in range(size[0]):
+            for dy in range(size[1]):
+                key = loc[0] + dx, loc[1] - dy
+
+                if dx == 0 and dy == 0:
+                    assert gridinfo_by_loc[key]['type'] == tile_type
+                else:
+                    assert gridinfo_by_loc[key]['type'] == 'NULL', (
+                            loc, key, tile_type, gridinfo_by_loc[key]['type'])
+
+    for loc in locs:
+        for dx in range(size[0]):
+            for dy in range(size[1]):
+                    gridinfo_by_loc[key]['type'] = 'expanded_' + tile_type
+
+
+def guess_tile_type_sizes(grid):
+    """ Guess tile type sizes by expanding, and then blocking by largest."""
+    grid = copy.deepcopy(grid)
+    tile_type_sizes = generate_tile_type_sizes(grid)
+    commited_types = set()
+
+    # Commit longest or widest tiles first
+    tile_type_by_size = sorted(tile_type_sizes, key=lambda k: max(tile_type_sizes[k][0],tile_type_sizes[k][1]), reverse=True)
+    for tile_type in tile_type_by_size:
+        if tile_type_sizes[tile_type] == (1, 1):
+            continue
+
+        recompute_sizes = False
+        try:
+            # Attempt to commit this tile type
+            commit_tile_type(grid, tile_type, tile_type_sizes[tile_type])
+        except AssertionError:
+            # Commit failed, try to recompute and then re-commit.
+            recompute_sizes = True
+
+        if recompute_sizes:
+            new_tile_type_sizes = generate_tile_type_sizes(grid)
+
+            for tile_type in new_tile_type_sizes:
+                if tile_type in commited_types:
+                    continue
+                if tile_type.startswith('expanded_'):
+                    continue
+
+                tile_type_sizes[tile_type] = new_tile_type_sizes[tile_type]
+
+            commit_tile_type(grid, tile_type, tile_type_sizes[tile_type])
+
+        commited_types.add(tile_type)
+
+    return tile_type_sizes
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description=
+        "Reduces raw database dump into prototype tiles, grid, and connections."
+    )
+    parser.add_argument('--root_dir', required=True)
+    parser.add_argument('--output_dir', required=True)
+    parser.add_argument('--verify_only', action='store_true')
+    parser.add_argument('--ignored_wires')
+    parser.add_argument('--max_cpu', type=int, default=10)
+
+    args = parser.parse_args()
+
+    tiles, nodes = lib.read_root_csv(args.root_dir)
+
+    processes = min(multiprocessing.cpu_count(), args.max_cpu)
+    print('{} Running {} processes'.format(datetime.datetime.now(), processes))
+    pool = multiprocessing.Pool(processes=processes)
+
+    node_tree_file = os.path.join(args.output_dir, 'node_tree.json')
+
+    tileconn_file = os.path.join(args.output_dir, 'tileconn.json')
+    wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle')
+
+    print('{} Reading tilegrid'.format(datetime.datetime.now()))
+    with open(
+            os.path.join(util.get_db_root(), util.get_part(),
+                         'tilegrid.json')) as f:
+        grid = json.load(f)
+
+    if not args.verify_only:
+        print('{} Creating tile map'.format(datetime.datetime.now()))
+        grid2, wire_map = generate_tilegrid(pool, tiles)
+
+        # Make sure tilegrid from 005-tilegrid matches tilegrid from
+        # generate_tilegrid.
+        db_grid_keys = set(grid.keys())
+        generated_grid_keys = set(grid2.keys())
+        assert db_grid_keys == generated_grid_keys, (
+            db_grid_keys ^ generated_grid_keys)
+
+        for tile in db_grid_keys:
+            for k in grid2[tile]:
+                assert k in grid[tile], k
+                assert grid[tile][k] == grid2[tile][k], (tile, k,
+                                                         grid[tile][k],
+                                                         grid2[tile][k])
+
+        with open(wire_map_file, 'wb') as f:
+            pickle.dump(wire_map, f)
+
+        print('{} Reading node tree'.format(datetime.datetime.now()))
+        with open(node_tree_file) as f:
+            node_tree = json.load(f)
+
+        tile_type_sizes = guess_tile_type_sizes(grid)
+
+        print('{} Creating tile connections'.format(datetime.datetime.now()))
+        tileconn, raw_node_data = generate_tileconn(pool, node_tree, nodes,
+                                                    wire_map, grid, tile_type_sizes)
+
+        for data in tileconn:
+            data['wire_pairs'] = tuple(
+                sorted(
+                    data['wire_pairs'],
+                    key=lambda x: tuple(extract_numbers(s) for s in x)))
+
+        tileconn = tuple(
+            sorted(
+                tileconn, key=lambda x: (x['tile_types'], x['grid_deltas'])))
+
+        print('{} Writing tileconn'.format(datetime.datetime.now()))
+        with open(tileconn_file, 'w') as f:
+            json.dump(tileconn, f, indent=2, sort_keys=True)
+    else:
+        with open(wire_map_file, 'rb') as f:
+            wire_map = pickle.load(f)
+
+        print('{} Reading raw_node_data'.format(datetime.datetime.now()))
+        raw_node_data = []
+        with progressbar.ProgressBar(max_value=len(nodes)) as bar:
+            for idx, node in enumerate(
+                    pool.imap_unordered(
+                        read_json5,
+                        nodes,
+                        chunksize=20,
+                    )):
+                bar.update(idx)
+                raw_node_data.append(node)
+                bar.update(idx + 1)
+
+        print('{} Reading tileconn'.format(datetime.datetime.now()))
+        with open(tileconn_file) as f:
+            tileconn = json.load(f)
+
+    wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle')
+    if os.path.exists(wire_nodes_file) and args.verify_only:
+        with open(wire_nodes_file, 'rb') as f:
+            wire_nodes = pickle.load(f)
+    else:
+        print("{} Connecting wires to verify tileconn".format(
+            datetime.datetime.now()))
+        wire_nodes = connect_wires(grid, tileconn, wire_map)
+        with open(wire_nodes_file, 'wb') as f:
+            pickle.dump(wire_nodes, f)
+
+    print('{} Verifing tileconn'.format(datetime.datetime.now()))
+    error_nodes = []
+    lib.verify_nodes(
+        [(node['node'], tuple(wire['wire'] for wire in node['wires']))
+         for node in raw_node_data], wire_nodes, error_nodes)
+
+    if len(error_nodes) > 0:
+        error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json')
+        with open(error_nodes_file, 'w') as f:
+            json.dump(error_nodes, f, indent=2, sort_keys=True)
+
+        ignored_wires = []
+        ignored_wires_file = args.ignored_wires
+        if os.path.exists(ignored_wires_file):
+            with open(ignored_wires_file) as f:
+                ignored_wires = set(l.strip() for l in f)
+
+        if not lib.check_errors(error_nodes, ignored_wires):
+            print('{} errors detected, see {} for details.'.format(
+                len(error_nodes), error_nodes_file))
+            sys.exit(1)
+        else:
+            print(
+                '{} errors ignored because of {}\nSee {} for details.'.format(
+                    len(error_nodes), ignored_wires_file, error_nodes_file))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/fuzzers/004-tileinfo/generate_ignore_list.py b/fuzzers/004-tileinfo/generate_ignore_list.py
new file mode 100644
index 0000000..5cd0be1
--- /dev/null
+++ b/fuzzers/004-tileinfo/generate_ignore_list.py
@@ -0,0 +1,32 @@
+import json
+
+# Load the flat (node, raw_node, generated_nodes) error triples produced by
+# the tileconn verification step.
+with open('output/error_nodes.json') as f:
+    flat_error_nodes = json.load(f)
+
+# Group errors by node name, collecting all distinct generated partitions.
+error_nodes = {}
+for node, raw_node, generated_nodes in flat_error_nodes:
+    if node not in error_nodes:
+        error_nodes[node] = {
+            'raw_node': set(raw_node),
+            'generated_nodes': set(),
+        }
+
+    # The raw node's wire set must be consistent across all its entries.
+    assert error_nodes[node]['raw_node'] == set(raw_node)
+    error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
+
+for node, error in error_nodes.items():
+    # The generated partitions must exactly cover the raw node's wires.
+    combined_generated_nodes = set()
+    for generated_node in error['generated_nodes']:
+        combined_generated_nodes |= set(generated_node)
+
+    assert error['raw_node'] == combined_generated_nodes, (node, error)
+
+    # Treat the largest generated partition as the correct node; the rest
+    # are candidate wires to ignore.
+    good_node = max(error['generated_nodes'], key=lambda x: len(x))
+    bad_nodes = error['generated_nodes'] - set((good_node, ))
+
+    # Only single-wire fragments can be safely ignored; abort otherwise.
+    # NOTE(review): assumes bad_nodes is non-empty here (max() on an empty
+    # sequence raises ValueError) -- verify against verify_nodes output.
+    if max(len(generated_node) for generated_node in bad_nodes) > 1:
+        assert False, node
+    else:
+        # Emit each stray wire on stdout for the ignore list.
+        for generated_node in bad_nodes:
+            for wire in generated_node:
+                print(wire)
diff --git a/fuzzers/004-tileinfo/get_nodescount.tcl b/fuzzers/004-tileinfo/get_nodescount.tcl
new file mode 100644
index 0000000..0c6fc89
--- /dev/null
+++ b/fuzzers/004-tileinfo/get_nodescount.tcl
@@ -0,0 +1,13 @@
+# Count the nodes in the target part and write the total to nb_nodes.txt.
+create_project -force -part $::env(URAY_PART) design design
+set_property design_mode PinPlanning [current_fileset]
+open_io_design -name io_1
+
+#set_param tcl.collectionResultDisplayLimit 0
+# Avoid accumulating message storage across the large query below.
+set_param messaging.disableStorage 1
+
+set nbnodes_fp [open nb_nodes.txt w]
+
+set nodes [get_nodes]
+puts $nbnodes_fp [llength $nodes]
+
+close $nbnodes_fp
diff --git a/fuzzers/004-tileinfo/get_speed_model.tcl b/fuzzers/004-tileinfo/get_speed_model.tcl
new file mode 100644
index 0000000..93a5b7c
--- /dev/null
+++ b/fuzzers/004-tileinfo/get_speed_model.tcl
@@ -0,0 +1,65 @@
+# Rewrite a "resource,speed_index" CSV file in place as a JSON5 dictionary
+# of per-speed-index timing/electrical data queried from Vivado.
+set filename [lindex $argv 0]
+
+create_project -force -part $::env(URAY_PART) -name $filename
+set_property design_mode PinPlanning [current_fileset]
+open_io_design -name io_1
+
+set_param messaging.disableStorage 1
+set fp [open $filename r]
+set file_data [read $fp]
+close $fp
+
+set fp [open $filename w]
+
+set indices [split $file_data "\n"]
+
+# Convert DRIVE from ??? units to 10^(-3 to -6) Ohms
+set MAGIC 0.6875
+
+puts $fp "\{"
+
+
+foreach index $indices {
+    if {$index == ""} {
+        continue
+    }
+
+    set split_index [split $index ","]
+    set resource [lindex $split_index 0]
+    set resource_index [lindex $split_index 1]
+
+    puts $fp "\t\"$resource_index\":"
+    puts $fp "\t\t\{"
+
+    if {$resource == "site_pin"} {
+        # NOTE(review): site_pin speed data is not dumped yet; an empty
+        # dictionary entry is emitted for these resources.
+    } elseif {$resource == "pip"} {
+        # Getting all pip information
+        set speed_model [get_speed_models -filter "SPEED_INDEX == $resource_index"]
+
+        puts $fp "\t\t\t\"resource_name\": \"$resource\","
+
+        set model_type [get_speed_model_name [get_property TYPE $speed_model]]
+        puts $fp "\t\t\t\"delay\":\["
+        # Fixed: these four lines referenced the undefined variable
+        # $forward_speed_model, which errored out on the first pip entry.
+        puts $fp "\t\t\t\t\"[get_property FAST_MIN $speed_model]\","
+        puts $fp "\t\t\t\t\"[get_property FAST_MAX $speed_model]\","
+        puts $fp "\t\t\t\t\"[get_property SLOW_MIN $speed_model]\","
+        puts $fp "\t\t\t\t\"[get_property SLOW_MAX $speed_model]\","
+        puts $fp "\t\t\t\],"
+    } elseif {$resource == "wire"} {
+        # Getting all wire information
+        set speed_model [get_speed_models -filter "SPEED_INDEX == $resource_index"]
+
+        puts $fp "\t\t\t\"resource_name\": \"$resource\","
+        puts $fp "\t\t\t\"res\":\"[get_property WIRE_RES $speed_model]\","
+        puts $fp "\t\t\t\"cap\":\"[get_property WIRE_CAP $speed_model]\","
+    } else {
+        puts "STUFF TO READ $index $resource"
+        exit 2
+    }
+
+    puts $fp "\t\t\},"
+}
+
+puts $fp "\}"
+
+close $fp
diff --git a/fuzzers/004-tileinfo/get_tilescount.tcl b/fuzzers/004-tileinfo/get_tilescount.tcl
new file mode 100644
index 0000000..f830db6
--- /dev/null
+++ b/fuzzers/004-tileinfo/get_tilescount.tcl
@@ -0,0 +1,13 @@
+# Count the tiles in the target part and write the total to nb_tiles.txt.
+create_project -force -part $::env(URAY_PART) design design
+set_property design_mode PinPlanning [current_fileset]
+open_io_design -name io_1
+
+#set_param tcl.collectionResultDisplayLimit 0
+# Avoid accumulating message storage across the large query below.
+set_param messaging.disableStorage 1
+
+set nbtiles_fp [open nb_tiles.txt w]
+
+set tiles [get_tiles]
+puts $nbtiles_fp [llength $tiles]
+
+close $nbtiles_fp
diff --git a/fuzzers/004-tileinfo/grid_tile_sizes.py b/fuzzers/004-tileinfo/grid_tile_sizes.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/fuzzers/004-tileinfo/grid_tile_sizes.py
diff --git a/fuzzers/004-tileinfo/jobnodes.tcl b/fuzzers/004-tileinfo/jobnodes.tcl
new file mode 100644
index 0000000..2793c5d
--- /dev/null
+++ b/fuzzers/004-tileinfo/jobnodes.tcl
@@ -0,0 +1,45 @@
+# Dump a JSON5 file (node name + wire list) for each node in the slice
+# [start, stop) of all nodes in the part.  Each output file is recorded in
+# root_node_<blocknb>.csv so the reducer can find them.
+set blocknb [lindex $argv 0]
+set start [expr int([lindex $argv 1])]
+set stop [expr int([lindex $argv 2])]
+
+create_project -force -part $::env(URAY_PART) $blocknb $blocknb
+set_property design_mode PinPlanning [current_fileset]
+open_io_design -name io_1
+
+#set_param tcl.collectionResultDisplayLimit 0
+set_param messaging.disableStorage 1
+
+set root_fp [open "root_node_${blocknb}.csv" w]
+
+set nodes [get_nodes]
+
+for {set j $start } { $j < $stop } { incr j } {
+    set node [lindex $nodes $j]
+
+    # Node names contain '/'; create the directory portion of the path.
+    file mkdir [file dirname "${node}"]
+    set fname $node.json5
+    puts $root_fp "node,,$fname"
+
+    set fp [open "${fname}" w]
+    # node properties:
+    # BASE_CLOCK_REGION CLASS COST_CODE COST_CODE_NAME IS_BAD IS_COMPLETE
+    # IS_GND IS_INPUT_PIN IS_OUTPUT_PIN IS_PIN IS_VCC NAME NUM_WIRES PIN_WIRE
+    # SPEED_CLASS
+    puts $fp "\{"
+    puts $fp "\t\"node\": \"$node\","
+    puts $fp "\t\"wires\": \["
+    foreach wire [get_wires -of_objects $node] {
+        # wire properties:
+        # CLASS COST_CODE ID_IN_TILE_TYPE IS_CONNECTED IS_INPUT_PIN IS_OUTPUT_PIN
+        # IS_PART_OF_BUS NAME NUM_DOWNHILL_PIPS NUM_INTERSECTS NUM_PIPS
+        # NUM_TILE_PORTS NUM_UPHILL_PIPS SPEED_INDEX TILE_NAME TILE_PATTERN_OFFSET
+        puts $fp "\t\t\{"
+        puts $fp "\t\t\t\"wire\":\"$wire\","
+        puts $fp "\t\t\},"
+    }
+    puts $fp "\t\]"
+    puts $fp "\}"
+    close $fp
+}
+
+close $root_fp
diff --git a/fuzzers/004-tileinfo/jobtiles.tcl b/fuzzers/004-tileinfo/jobtiles.tcl
new file mode 100644
index 0000000..4368163
--- /dev/null
+++ b/fuzzers/004-tileinfo/jobtiles.tcl
@@ -0,0 +1,133 @@
+# Dump a JSON5 file per tile for the slice [start, stop) of all tiles in
+# the part, covering the tile's sites (pins, pips, package pins), pips and
+# wires.  Each output file is recorded in root_<blocknb>.csv.
+set blocknb [lindex $argv 0]
+set start [expr int([lindex $argv 1])]
+set stop [expr int([lindex $argv 2])]
+
+create_project -force -part $::env(URAY_PART) $blocknb $blocknb
+set_property design_mode PinPlanning [current_fileset]
+open_io_design -name io_1
+
+#set_param tcl.collectionResultDisplayLimit 0
+set_param messaging.disableStorage 1
+
+set root_fp [open "root_${blocknb}.csv" w]
+#puts $root_fp "filetype,subtype,filename"
+
+set tiles [get_tiles]
+
+for {set j $start } { $j < $stop } { incr j } {
+
+    set tile [lindex $tiles $j]
+
+    set fname tile_$tile.json5
+    set tile_type [get_property TYPE $tile]
+    puts $root_fp "tile,$tile_type,$fname"
+
+    set fp [open "${fname}" w]
+    puts $fp "\{"
+    puts $fp "\t\"tile\": \"$tile\","
+    # tile properties:
+    # CLASS COLUMN DEVICE_ID FIRST_SITE_ID GRID_POINT_X GRID_POINT_Y INDEX
+    # INT_TILE_X INT_TILE_Y IS_CENTER_TILE IS_DCM_TILE IS_GT_CLOCK_SITE_TILE
+    # IS_GT_SITE_TILE NAME NUM_ARCS NUM_SITES ROW SLR_REGION_ID
+    # TILE_PATTERN_IDX TILE_TYPE TILE_TYPE_INDEX TILE_X TILE_Y TYPE
+    puts $fp "\t\"type\": \"$tile_type\","
+    puts $fp "\t\"x\": [get_property GRID_POINT_X $tile],"
+    puts $fp "\t\"y\": [get_property GRID_POINT_Y $tile],"
+    puts $fp "\t\"sites\": \["
+    foreach site [get_sites -of_objects $tile] {
+        # site properties:
+        # ALTERNATE_SITE_TYPES CLASS CLOCK_REGION IS_BONDED IS_CLOCK_BUFFER
+        # IS_CLOCK_PAD IS_GLOBAL_CLOCK_BUFFER IS_GLOBAL_CLOCK_PAD IS_PAD
+        # IS_REGIONAL_CLOCK_BUFFER IS_REGIONAL_CLOCK_PAD IS_RESERVED IS_TEST
+        # IS_USED MANUAL_ROUTING NAME NUM_ARCS NUM_BELS NUM_INPUTS NUM_OUTPUTS
+        # NUM_PINS PRIMITIVE_COUNT PROHIBIT PROHIBIT_FROM_PERSIST RPM_X RPM_Y
+        # SITE_PIPS SITE_TYPE
+
+        puts $fp "\t\t\{"
+        puts $fp "\t\t\t\"site\":\"$site\","
+        puts $fp "\t\t\t\"type\":\"[get_property SITE_TYPE $site]\","
+        puts $fp "\t\t\t\"site_pins\": \["
+        foreach site_pin [get_site_pins -of_objects $site] {
+            # site_pin properties:
+            # CLASS DIRECTION INDEX INDEX_IN_BUS INDEX_IN_SITE INDEX_IN_TILE IS_BAD
+            # IS_INPUT IS_OUTPUT IS_PART_OF_BUS IS_TEST IS_USED NAME SITE_ID
+            # SPEED_INDEX
+            puts $fp "\t\t\t\{"
+            puts $fp "\t\t\t\t\"site_pin\":\"$site_pin\","
+
+            set site_pin_speed_model_index [get_property SPEED_INDEX $site_pin]
+            puts $fp "\t\t\t\t\"speed_model_index\":\"$site_pin_speed_model_index\","
+
+            set dir [get_property DIRECTION $site_pin]
+            puts $fp "\t\t\t\t\"direction\":\"$dir\","
+            # A site pin without a node emits an explicit null.
+            set site_pin_node [get_nodes -of_objects $site_pin]
+            if {[llength $site_pin_node] == 0} {
+                puts $fp "\t\t\t\t\"node\":null,"
+            } else {
+                puts $fp "\t\t\t\t\"node\":\"$site_pin_node\","
+            }
+            puts $fp "\t\t\t\},"
+        }
+        puts $fp "\t\t\t\],"
+        puts $fp "\t\t\t\"site_pips\": \["
+        foreach site_pip [get_site_pips -of_objects $site] {
+            puts $fp "\t\t\t\{"
+            # site_pips properties:
+            # CLASS FROM_PIN IS_FIXED IS_USED NAME SITE TO_PIN
+            puts $fp "\t\t\t\t\"site_pip\":\"$site_pip\","
+            puts $fp "\t\t\t\t\"to_pin\":\"[get_property TO_PIN $site_pip]\","
+            puts $fp "\t\t\t\t\"from_pin\":\"[get_property FROM_PIN $site_pip]\","
+            puts $fp "\t\t\t\},"
+        }
+        puts $fp "\t\t\t\],"
+
+        puts $fp "\t\t\t\"package_pins\": \["
+        foreach package_pin [get_package_pins -of_objects $site] {
+            puts $fp "\t\t\t\t\{"
+            puts $fp "\t\t\t\t\t\"package_pin\":\"$package_pin\","
+            puts $fp "\t\t\t\t\},"
+        }
+        puts $fp "\t\t\t\],"
+
+        puts $fp "\t\t\},"
+    }
+    puts $fp "\t\],"
+    puts $fp "\t\"pips\": \["
+    foreach pip [get_pips -of_objects $tile] {
+        # pip properties:
+        # CAN_INVERT CLASS IS_BUFFERED_2_0 IS_BUFFERED_2_1 IS_DIRECTIONAL
+        # IS_EXCLUDED_PIP IS_FIXED_INVERSION IS_INVERTED IS_PSEUDO IS_SITE_PIP
+        # IS_TEST_PIP NAME SPEED_INDEX TILE
+        puts $fp "\t\t\{"
+        puts $fp "\t\t\t\"pip\":\"$pip\","
+
+        set pip_speed_model_index [get_property SPEED_INDEX $pip]
+        puts $fp "\t\t\t\"speed_model_index\":\"$pip_speed_model_index\","
+        puts $fp "\t\t\t\"src_wire\":\"[get_wires -uphill -of_objects $pip]\","
+        puts $fp "\t\t\t\"dst_wire\":\"[get_wires -downhill -of_objects $pip]\","
+        puts $fp "\t\t\t\"is_pseudo\":\"[get_property IS_PSEUDO $pip]\","
+        puts $fp "\t\t\t\"is_directional\":\"[get_property IS_DIRECTIONAL $pip]\","
+        puts $fp "\t\t\t\"can_invert\":\"[get_property CAN_INVERT $pip]\","
+        puts $fp "\t\t\},"
+    }
+    puts $fp "\t\],"
+
+    puts $fp "\t\"wires\": \["
+    foreach wire [get_wires -of_objects $tile] {
+        # wire properties:
+        # CLASS COST_CODE ID_IN_TILE_TYPE IS_CONNECTED IS_INPUT_PIN IS_OUTPUT_PIN
+        # IS_PART_OF_BUS NAME NUM_DOWNHILL_PIPS NUM_INTERSECTS NUM_PIPS
+        # NUM_TILE_PORTS NUM_UPHILL_PIPS SPEED_INDEX TILE_NAME TILE_PATTERN_OFFSET
+        puts $fp "\t\t\{"
+        puts $fp "\t\t\t\"wire\":\"$wire\","
+
+        set wire_speed_model_index [get_property SPEED_INDEX $wire]
+        # NOTE(review): this line uses one more \t than its siblings; the
+        # output indentation is cosmetic (JSON5 ignores whitespace).
+        puts $fp "\t\t\t\t\"speed_model_index\":\"$wire_speed_model_index\","
+        puts $fp "\t\t\},"
+    }
+    puts $fp "\t\],"
+    puts $fp "\}"
+    close $fp
+}
+
+close $root_fp
diff --git a/fuzzers/004-tileinfo/reduce_site_types.py b/fuzzers/004-tileinfo/reduce_site_types.py
new file mode 100644
index 0000000..807f73e
--- /dev/null
+++ b/fuzzers/004-tileinfo/reduce_site_types.py
@@ -0,0 +1,62 @@
+""" Reduce sites types to prototypes that are always correct.
+
+reduce_tile_types.py generates per tile type site types.  reduce_site_types.py
+takes all site types across all tiles and creates generic site types that are
+valid for all tile types.
+
+"""
+
+import argparse
+import utils.lib
+import os
+import os.path
+import re
+import json
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Reduces per tile site types to generic site types.")
+    parser.add_argument('--output_dir', required=True)
+
+    args = parser.parse_args()
+
+    SITE_TYPE = re.compile('^tile_type_(.+)_site_type_(.+)\.json$')
+    site_types = {}
+    for path in os.listdir(args.output_dir):
+        match = SITE_TYPE.fullmatch(path)
+        if match is None:
+            continue
+
+        site_type = match.group(2)
+        if site_type not in site_types:
+            site_types[site_type] = []
+
+        site_types[site_type].append(path)
+
+    for site_type in site_types:
+        proto_site_type = None
+        for instance in site_types[site_type]:
+            with open(os.path.join(args.output_dir, instance)) as f:
+                instance_site_type = json.load(f)
+
+                for site_pin in instance_site_type['site_pins'].values():
+                    if 'index_in_site' in site_pin:
+                        del site_pin['index_in_site']
+
+            if proto_site_type is None:
+                proto_site_type = instance_site_type
+            else:
+                utils.lib.compare_prototype_site(
+                    proto_site_type,
+                    instance_site_type,
+                )
+
+        with open(
+                os.path.join(args.output_dir,
+                             'site_type_{}.json'.format(site_type)), 'w') as f:
+            json.dump(proto_site_type, f)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/fuzzers/004-tileinfo/reduce_tile_types.py b/fuzzers/004-tileinfo/reduce_tile_types.py
new file mode 100644
index 0000000..c018b76
--- /dev/null
+++ b/fuzzers/004-tileinfo/reduce_tile_types.py
@@ -0,0 +1,489 @@
+""" Reduce tile types to prototypes that are always correct.
+
+The dump-all generate.tcl dumps all instances of each tile type.  Some tiles
+are missing wires.  reduce_tile_types.py generates the superset tile that
+encompasses all tiles of that type.  If it is not possible to generate a
+superset tile, an error will be generated.
+
+"""
+
+import argparse
+import utils.lib
+import utils.node_lookup
+import datetime
+import subprocess
+import os.path
+import pyjson5 as json5
+import progressbar
+import multiprocessing
+import os
+import functools
+import json
+from utils.xjson import extract_numbers
+
+
+def check_and_strip_prefix(name, prefix):
+    assert name.startswith(prefix), repr((name, prefix))
+    return name[len(prefix):]
+
+
+def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires):
+    """ Map site-relative pin names to wire/speed info (or None).
+
+    For each site pin, site_pin_node_to_wires yields the wires of the
+    pin's node within `tile`.  Pins whose node has no wire in this tile
+    map to None; when a node has several wires in the tile, the wire that
+    shares the node's own name is chosen.
+    """
+    def inner():
+        for site_pin in site_pins:
+            wires = tuple(site_pin_node_to_wires(tile, site_pin['node']))
+
+            if len(wires) == 0:
+                # Node has no wires within this tile; record the pin as
+                # unconnected.
+                yield (check_and_strip_prefix(site_pin['site_pin'],
+                                              site + '/'), None)
+                continue
+
+            if len(wires) == 1:
+                wire = wires[0]
+            else:
+                # Several wires in the tile belong to this node; pick the
+                # wire named after the node itself (asserted to exist).
+                node = check_and_strip_prefix(site_pin['node'], tile + '/')
+                assert node in wires
+                wire = node
+
+            pin_info = {
+                'wire': wire,
+                'speed_model_index': site_pin['speed_model_index'],
+            }
+
+            yield (check_and_strip_prefix(site_pin['site_pin'], site + '/'),
+                   pin_info)
+
+    return dict(inner())
+
+
+def compare_sites_and_update(tile, sites, new_sites):
+    """ Check two site lists agree and fill in missing site pin info.
+
+    Sites are compared pairwise in order; types and pin name sets must
+    match exactly.  A pin value present only in new_sites is copied into
+    `sites` (mutated in place); a pin present in both must be identical.
+    """
+    for site_a, site_b in zip(sites, new_sites):
+        assert site_a['type'] == site_b['type']
+        assert site_a['site_pins'].keys() == site_b['site_pins'].keys()
+
+        for site_pin in site_a['site_pins']:
+            if site_a['site_pins'][site_pin] is not None and site_b[
+                    'site_pins'][site_pin] is not None:
+                # Both instances know this pin; they must agree.
+                assert site_a['site_pins'][site_pin] == site_b['site_pins'][
+                    site_pin]
+            elif site_a['site_pins'][site_pin] is None and site_b['site_pins'][
+                    site_pin] is not None:
+                # Fill in pin info that was missing from earlier instances.
+                site_a['site_pins'][site_pin] = site_b['site_pins'][site_pin]
+
+
+def get_prototype_site(site):
+    proto = {}
+    proto['type'] = site['type']
+    proto['site_pins'] = {}
+    proto['site_pips'] = {}
+    for site_pin in site['site_pins']:
+        name = check_and_strip_prefix(site_pin['site_pin'], site['site'] + '/')
+
+        proto['site_pins'][name] = {
+            'direction': site_pin['direction'],
+        }
+
+    for site_pip in site['site_pips']:
+        name = check_and_strip_prefix(site_pip['site_pip'], site['site'] + '/')
+
+        proto['site_pips'][name] = {
+            'to_pin': site_pip['to_pin'],
+            'from_pin': site_pip['from_pin'],
+        }
+
+    return proto
+
+
+def get_pips(tile, pips):
+    proto_pips = {}
+
+    for pip in pips:
+        name = check_and_strip_prefix(pip['pip'], tile + '/')
+
+        proto_pips[name] = {
+            'src_wire':
+            check_and_strip_prefix(pip['src_wire'], tile + '/')
+            if pip['src_wire'] is not None else None,
+            'dst_wire':
+            check_and_strip_prefix(pip['dst_wire'], tile + '/')
+            if pip['dst_wire'] is not None else None,
+            'is_pseudo':
+            pip['is_pseudo'],
+            'is_directional':
+            pip['is_directional'],
+            'can_invert':
+            pip['can_invert'],
+            'speed_model_index':
+            pip['speed_model_index'],
+        }
+
+    return proto_pips
+
+
+def compare_and_update_pips(pips, new_pips):
+    """ Check two pip maps agree and fill in missing src/dst wires.
+
+    Pip names are always the same, but sometimes the src_wire or dst_wire
+    may be missing (None); such wires are copied from new_pips into pips
+    (mutated in place).  Wires known on both sides must agree exactly.
+    """
+    assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys()))
+    for name in pips:
+        if pips[name]['src_wire'] is not None and new_pips[name][
+                'src_wire'] is not None:
+            assert pips[name]['src_wire'] == new_pips[name]['src_wire'], repr((
+                pips[name]['src_wire'],
+                new_pips[name]['src_wire'],
+            ))
+        elif pips[name]['src_wire'] is None and new_pips[name][
+                'src_wire'] is not None:
+            # Earlier instances were missing this wire; take it now.
+            pips[name]['src_wire'] = new_pips[name]['src_wire']
+
+        if pips[name]['dst_wire'] is not None and new_pips[name][
+                'dst_wire'] is not None:
+            assert pips[name]['dst_wire'] == new_pips[name]['dst_wire'], repr((
+                pips[name]['dst_wire'],
+                new_pips[name]['dst_wire'],
+            ))
+        elif pips[name]['dst_wire'] is None and new_pips[name][
+                'dst_wire'] is not None:
+            pips[name]['dst_wire'] = new_pips[name]['dst_wire']
+
+        # Boolean pip properties must always agree exactly.
+        for k in ['is_pseudo', 'is_directional', 'can_invert']:
+            assert pips[name][k] == new_pips[name][k], (k, pips[name][k],
+                                                        new_pips[name][k])
+
+
+def check_wires(wires, sites, pips):
+    """ Verify that the wires generated from nodes are a superset of the
+    wires referenced by site pins and pips.  Asserts on any violation. """
+    if sites is not None:
+        for site in sites:
+            for wire_to_site_pin in site['site_pins'].values():
+                # Unconnected pins (None) reference no wire.
+                if wire_to_site_pin is not None:
+                    assert wire_to_site_pin['wire'] in wires, repr(
+                        (wire_to_site_pin, wires))
+
+    if pips is not None:
+        for pip in pips.values():
+            if pip['src_wire'] is not None:
+                assert pip['src_wire'] in wires, repr((pip['src_wire'], wires))
+            if pip['dst_wire'] is not None:
+                assert pip['dst_wire'] in wires, repr((pip['dst_wire'], wires))
+
+
+def get_sites(tile, site_pin_node_to_wires):
+    """ Yield normalized site descriptions for every site in a tile.
+
+    Coordinates are rebased against the origin computed by
+    utils.lib.find_origin_coordinate so names like 'X0Y0' are
+    tile-relative rather than device-absolute.
+    """
+    for site in tile['sites']:
+        # NOTE(review): the genexp variable shadows the loop variable name;
+        # in Python 3 the genexp has its own scope, so this is safe.
+        min_x_coord, min_y_coord = utils.lib.find_origin_coordinate(
+            site['site'], (site['site'] for site in tile['sites']))
+
+        orig_site_name = site['site']
+        coordinate = utils.lib.get_site_coordinate_from_name(orig_site_name)
+
+        # Rebase absolute coordinates to tile-relative ones.
+        x_coord = coordinate.x_coord - min_x_coord
+        y_coord = coordinate.y_coord - min_y_coord
+
+        yield ({
+            'name':
+            'X{}Y{}'.format(x_coord, y_coord),
+            'prefix':
+            coordinate.prefix,
+            'x_coord':
+            x_coord,
+            'y_coord':
+            y_coord,
+            'type':
+            site['type'],
+            'site_pins':
+            # dict() of an already-dict result; harmless shallow copy.
+            dict(
+                flatten_site_pins(tile['tile'], site['site'],
+                                  site['site_pins'], site_pin_node_to_wires)),
+        })
+
+
+def read_json5(fname, database_file):
+    """ Load one tile instance dump and derive its reduced components.
+
+    Returns (fname, tile, site_types, sites, pips, wires) where wires maps
+    tile-relative wire names to their speed model index.  Asserts that the
+    tile's wire list covers every wire the node database knows for it.
+    """
+    # Each worker process opens its own connection to the node database.
+    node_lookup = utils.node_lookup.NodeLookup(database_file)
+
+    with open(fname) as f:
+        tile = json5.load(f)
+
+    def get_site_types():
+        for site in tile['sites']:
+            yield get_prototype_site(site)
+
+    site_types = tuple(get_site_types())
+    sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
+    pips = get_pips(tile['tile'], tile['pips'])
+
+    def inner():
+        # Strip the '<tile>/' prefix from each wire name.
+        for wire in tile['wires']:
+            assert wire['wire'].startswith(tile['tile'] + '/')
+
+            wire_speed_model_index = wire['speed_model_index']
+
+            yield wire['wire'][len(tile['tile']) + 1:], wire_speed_model_index
+
+    wires = {k: v for (k, v) in inner()}
+    wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
+    # The node database must not know wires the tile dump lacks.
+    assert len(wires_from_nodes - wires.keys()) == 0, repr((wires,
+                                                            wires_from_nodes))
+
+    return fname, tile, site_types, sites, pips, wires
+
+
+def compare_and_update_wires(wires, new_wires):
+    for wire in new_wires:
+        if wire not in wires:
+            wires[wire] = new_wires
+        else:
+            assert wires[wire] == new_wires[wire]
+
+
+def get_speed_model_indices(reduced_tile):
+    """ Extracts the speed model indices for the data structure """
+
+    speed_model_indices = set()
+
+    for site in reduced_tile['sites']:
+        for site_pin in site['site_pins'].keys():
+            if site['site_pins'][site_pin] is None:
+                continue
+
+            speed_model_indices.add('site_pin,{}'.format(
+                site['site_pins'][site_pin]['speed_model_index']))
+
+    for pip in reduced_tile['pips'].keys():
+        speed_model_indices.add('pip,{}'.format(
+            reduced_tile['pips'][pip]['speed_model_index']))
+
+    for wire in reduced_tile['wires'].keys():
+        speed_model_indices.add('wire,{}'.format(reduced_tile['wires'][wire]))
+
+    return speed_model_indices
+
+
+def annotate_pips_speed_model(pips, speed_data):
+    """ Updates the pips with correct timing data.
+
+    Replaces each pip's 'speed_model_index' with 'is_pass_transistor' plus
+    forward/reverse delay, input-cap and resistance records taken from
+    speed_data (a mapping of speed model index -> model record).
+    """
+    for pip_name, pip_data in pips.items():
+        speed_model_index = pip_data['speed_model_index']
+
+        pip_speed_data = speed_data[speed_model_index]
+        # The index must refer to a pip speed model record.
+        assert pip_speed_data['resource_name'] == 'pip', (
+            pip_speed_data['resource_name'], speed_model_index)
+
+        pips[pip_name]['is_pass_transistor'] = pip_speed_data[
+            'is_pass_transistor']
+        # Missing directional data is recorded as None.
+        pips[pip_name]['src_to_dst'] = {
+            'delay': pip_speed_data.get('forward_delay', None),
+            'in_cap': pip_speed_data.get('forward_in_cap', None),
+            'res': pip_speed_data.get('forward_res', None),
+        }
+        pips[pip_name]['dst_to_src'] = {
+            'delay': pip_speed_data.get('reverse_delay', None),
+            'in_cap': pip_speed_data.get('reverse_in_cap', None),
+            'res': pip_speed_data.get('reverse_res', None),
+        }
+
+        # The raw index is no longer needed once annotated.
+        del pips[pip_name]['speed_model_index']
+
+
+def annotate_site_pins_speed_model(site_pins, speed_data):
+    """ Updates the site_pins with correct timing data.
+
+    Replaces each pin's 'speed_model_index' with 'delay' (always) and
+    'cap'/'res' (when present).  Unconnected pins (None) are skipped.
+    """
+    for site_pin_name, pin_data in site_pins.items():
+        if pin_data is None:
+            continue
+
+        speed_model_index = pin_data['speed_model_index']
+
+        pin_speed_data = speed_data[speed_model_index]
+        # The index must refer to a site_pin speed model record.
+        assert pin_speed_data['resource_name'] == 'site_pin', (
+            pin_speed_data['resource_name'], speed_model_index)
+
+        site_pins[site_pin_name]['delay'] = pin_speed_data['delay']
+
+        # NOTE(review): cap/res are compared against the string 'null',
+        # implying the speed data stores them as strings -- confirm
+        # against get_speed_model.tcl's output format.
+        cap = pin_speed_data['cap']
+        res = pin_speed_data['res']
+        if cap != 'null':
+            site_pins[site_pin_name]['cap'] = cap
+        if res != 'null':
+            site_pins[site_pin_name]['res'] = res
+
+        del site_pins[site_pin_name]['speed_model_index']
+
+
+def annotate_wires_speed_model(wires, speed_data):
+    """ Updates the wires with correct timing data.
+
+    Replaces each wire's speed model index value with a {'cap', 'res'}
+    dict, or None when both values are zero.
+    """
+    for wire_name, wire_data in wires.items():
+        # The wire's value is its speed model index.
+        speed_model_index = wire_data
+
+        wire_speed_data = speed_data[speed_model_index]
+        assert wire_speed_data['resource_name'] == 'wire', (
+            wire_speed_data['resource_name'], speed_model_index)
+
+        # NOTE(review): values appear to be decimal strings like '0.000'
+        # rather than floats -- confirm against the speed data format.
+        cap = wire_speed_data['cap']
+        res = wire_speed_data['res']
+        if cap != '0.000' or res != '0.000':
+            wires[wire_name] = {
+                'cap': cap,
+                'res': res,
+            }
+        else:
+            # Electrically negligible wire.
+            wires[wire_name] = None
+
+
+def annotate_speed_model(tile_type, reduced_tile, root_dir):
+    """ Updates the reduced tile with the correct speed information.
+
+    Writes the tile's speed model index keys to a temp file, invokes
+    Vivado (get_speed_model.tcl) on it, reads the resulting speed data
+    back, then annotates site pins, pips and wires in place.
+    """
+    speed_model_indices = get_speed_model_indices(reduced_tile)
+
+    tmp_indices_file = os.path.join(root_dir,
+                                    '{}_speed_index.tmp'.format(tile_type))
+
+    # One '<kind>,<index>' key per line.
+    with open(tmp_indices_file, "w") as f:
+        for index in speed_model_indices:
+            print(index, file=f)
+
+    # Get vivado path
+    vivado = os.getenv('URAY_VIVADO')
+    assert vivado is not None
+
+    # NOTE(review): the tcl script appears to overwrite tmp_indices_file
+    # with JSON5 speed data (it is re-read below) -- confirm against
+    # get_speed_model.tcl.
+    subprocess.check_call(
+        "{} -mode batch -source get_speed_model.tcl -tclargs {}".format(
+            vivado, tmp_indices_file),
+        shell=True,
+        stdout=subprocess.DEVNULL)
+
+    with open(tmp_indices_file, "r") as f:
+        speed_model_data = json5.load(f)
+
+    for site in reduced_tile['sites']:
+        annotate_site_pins_speed_model(site['site_pins'], speed_model_data)
+
+    annotate_pips_speed_model(reduced_tile['pips'], speed_model_data)
+    annotate_wires_speed_model(reduced_tile['wires'], speed_model_data)
+
+
+def reduce_tile(pool, site_types, tile_type, tile_instances, database_file):
+    sites = None
+    pips = None
+    wires = None
+
+    with progressbar.ProgressBar(max_value=len(tile_instances)) as bar:
+        chunksize = 1
+        if len(tile_instances) < chunksize * 2:
+            iter = map(lambda file: read_json5(file, database_file),
+                       tile_instances)
+        else:
+            print('{} Using pool.imap_unordered'.format(
+                datetime.datetime.now()))
+            iter = pool.imap_unordered(
+                functools.partial(read_json5, database_file=database_file),
+                tile_instances,
+                chunksize=chunksize,
+            )
+
+        for idx, (fname, tile, new_site_types, new_sites, new_pips,
+                  new_wires) in enumerate(iter):
+            bar.update(idx)
+
+            assert tile['type'] == tile_type, repr((tile['tile'], tile_type))
+
+            for site_type in new_site_types:
+                if site_type['type'] in site_types:
+                    utils.lib.compare_prototype_site(
+                        site_type, site_types[site_type['type']])
+                else:
+                    site_types[site_type['type']] = site_type
+
+            # Sites are expect to always be the same
+            if sites is None:
+                sites = new_sites
+            else:
+                compare_sites_and_update(tile['tile'], sites, new_sites)
+
+            if pips is None:
+                pips = new_pips
+            else:
+                compare_and_update_pips(pips, new_pips)
+
+            if wires is None:
+                wires = new_wires
+            else:
+                compare_and_update_wires(wires, new_wires)
+
+            bar.update(idx + 1)
+
+    check_wires(wires, sites, pips)
+
+    return {
+        'tile_type': tile_type,
+        'sites': sites,
+        'pips': pips,
+        'wires': wires,
+    }
+
+
+def main():
+    """ Reduce raw tile dumps into per-tile-type and per-site-type JSONs.
+
+    For each tile type found via root.csv, writes tile_type_<T>.json and
+    tile_type_<T>_site_type_<S>.json into --output_dir.  Already-present
+    tile type files are skipped, so interrupted runs can resume.
+    """
+    parser = argparse.ArgumentParser(
+        description=
+        "Reduces raw database dump into prototype tiles, grid, and connections."
+    )
+    parser.add_argument('--root_dir', required=True)
+    parser.add_argument('--output_dir', required=True)
+    parser.add_argument('--ignore_cache', action='store_true')
+
+    args = parser.parse_args()
+
+    print('{} Reading root.csv'.format(datetime.datetime.now()))
+    tiles, nodes = utils.lib.read_root_csv(args.root_dir)
+
+    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
+    database_file = os.path.join(args.output_dir, 'nodes.db')
+    if os.path.exists(database_file) and not args.ignore_cache:
+        node_lookup = utils.node_lookup.NodeLookup(database_file)
+    else:
+        # NOTE(review): with --ignore_cache and an existing nodes.db this
+        # rebuilds into the old file -- confirm build_database tolerates
+        # pre-existing tables.
+        node_lookup = utils.node_lookup.NodeLookup(database_file)
+        node_lookup.build_database(nodes=nodes, tiles=tiles)
+
+    site_types = {}
+
+    processes = multiprocessing.cpu_count()
+    print('Running {} processes'.format(processes))
+    pool = multiprocessing.Pool(processes=processes)
+
+    for tile_type in sorted(tiles.keys()):
+        #for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_L']:
+        tile_type_file = os.path.join(args.output_dir,
+                                      'tile_type_{}.json'.format(tile_type))
+        # Reset per tile type so site types are not shared across types.
+        site_types = {}
+        if os.path.exists(tile_type_file):
+            print('{} Skip reduced tile for {}'.format(datetime.datetime.now(),
+                                                       tile_type))
+            continue
+        print('{} Generating reduced tile for {}'.format(
+            datetime.datetime.now(), tile_type))
+        reduced_tile = reduce_tile(pool, site_types, tile_type,
+                                   tiles[tile_type], database_file)
+
+        # FIXME: Need to determine URAY timing model structure.
+        # annotate_speed_model(tile_type, reduced_tile, args.root_dir)
+
+        # Write one file per site type observed in this tile type.
+        for site_type in site_types:
+            with open(
+                    os.path.join(
+                        args.output_dir,
+                        'tile_type_{}_site_type_{}.json'.format(
+                            tile_type, site_types[site_type]['type'])),
+                    'w') as f:
+                json.dump(site_types[site_type], f, indent=2, sort_keys=True)
+
+        # Sort sites by (name, prefix) with numeric-aware ordering.
+        reduced_tile['sites'] = sorted(
+            reduced_tile['sites'],
+            key=lambda site: extract_numbers(
+                '{}_{}'.format(site['name'], site['prefix'])))
+
+        with open(tile_type_file, 'w') as f:
+            json.dump(reduced_tile, f, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/fuzzers/004-tileinfo/run_fuzzer.py b/fuzzers/004-tileinfo/run_fuzzer.py
new file mode 100644
index 0000000..e979bb2
--- /dev/null
+++ b/fuzzers/004-tileinfo/run_fuzzer.py
@@ -0,0 +1,162 @@
+import shutil
+import sys
+import subprocess
+from multiprocessing import Pool
+from itertools import chain
+import argparse
+
+# To discard Vivado's voluminous output, pass stdout=DEVNULL to
+# subprocess.check_call.
+
+
+# Worker function called from threads
+def start_tiles(argList):
+    blockID, start, stop, total = argList
+    print("Running instance :" + str(blockID) + " / " + str(total))
+    subprocess.check_call(
+        "${URAY_VIVADO} -mode batch -source $FUZDIR/jobtiles.tcl -tclargs " +
+        str(blockID) + " " + str(start) + " " + str(stop),
+        shell=True)
+
+
+def start_nodes(argList):
+    blockID, start, stop, total = argList
+    print("Running instance :" + str(blockID) + " / " + str(total))
+    subprocess.check_call(
+        "${URAY_VIVADO} -mode batch -source $FUZDIR/jobnodes.tcl -tclargs " +
+        str(blockID) + " " + str(start) + " " + str(stop),
+        shell=True)
+
+
+# Function called once to get the total numbers of tiles to list
+def get_nb_tiles():
+    print("Fetching total number of tiles")
+    subprocess.check_call(
+        "${URAY_VIVADO} -mode batch -source $FUZDIR/get_tilescount.tcl",
+        shell=True)
+    countfile = open("nb_tiles.txt", "r")
+    return int(countfile.readline())
+
+
+def get_nb_nodes():
+    print("Fetching total number of nodes")
+    subprocess.check_call(
+        "${URAY_VIVADO} -mode batch -source $FUZDIR/get_nodescount.tcl",
+        shell=True)
+    countfile = open("nb_nodes.txt", "r")
+    return int(countfile.readline())
+
+
+def run_pool(itemcount, nbBlocks, blocksize, nbParBlock, workFunc):
+    # We handle the case of not integer multiple of pips
+    intitemcount = blocksize * nbBlocks
+    lastRun = False
+    modBlocks = itemcount - intitemcount
+    if modBlocks != 0:
+        lastRun = True
+        nbBlocks = nbBlocks + 1
+
+    print("Items Count: " + str(itemcount) + " - Number of blocks: " +
+          str(nbBlocks) + " - Parallel blocks: " + str(nbParBlock) +
+          " - Blocksize: " + str(blocksize) + " - Modulo Blocks: " +
+          str(modBlocks))
+
+    blockId = range(0, nbBlocks)
+    startI = range(0, intitemcount, blocksize)
+    stopI = range(blocksize, intitemcount + 1, blocksize)
+    totalBlock = [nbBlocks for _ in range(nbBlocks)]
+
+    # In case we have a last incomplete block we add it as a last
+    # element in the arguments list
+    if lastRun == True:
+        startI = chain(startI, [intitemcount])
+        stopI = chain(stopI, [itemcount])
+
+    argList = zip(blockId, startI, stopI, totalBlock)
+
+    with Pool(processes=nbParBlock) as pool:
+        pool.map(workFunc, argList)
+
+    return nbBlocks
+
+
+# ==========================================================================
+# ===== FPGA Logic Items data ==============================================
+# For Artix 7 50T:
+#   - Total pips: 22002368
+#   - Total tiles: 18055
+#   - Total nodes: 1953452
+# For Kintex 7 70T:
+#   - Total pips: 29424910
+#   - Total tiles: 24453
+#   - Total nodes: 2663055
+# For Zynq 7 z010:
+#   - Total pips: 12462138
+#   - Total tiles: 13440
+#   - Total nodes: 1122477
+# =========================================================================
+# Dividing the work into roughly 64 blocks across 4 cores is not optimal,
+# but is a reasonable default that runs on most computers.
+# =========================================================================
+
+
+def main(argv):
+    """ Dump all tiles and nodes of the part using parallel Vivado runs.
+
+    Splits the tile and node counts into fixed-size blocks, processes them
+    with a pool of Vivado instances, then concatenates the per-block CSV
+    outputs into root.csv.  Returns 0 on success.
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-p",
+        "--nbPar",
+        help="Number of parallel instances of Vivado",
+        type=int,
+        default=4)
+    parser.add_argument(
+        "-t",
+        "--sizeTilesBlock",
+        help="Define the number of tiles to process per instance",
+        type=int,
+        default=300)
+    parser.add_argument(
+        "-n",
+        "--sizeNodesBlock",
+        help="Define the number of nodes to process per instance",
+        type=int,
+        default=30000)
+    args = parser.parse_args()
+
+    nbParBlock = args.nbPar
+    blockTilesSize = args.sizeTilesBlock
+    blockNodesSize = args.sizeNodesBlock
+
+    print(" nbPar: " + str(nbParBlock) + " blockTilesSize: " +
+          str(blockTilesSize) + " blockNodesSize: " + str(blockNodesSize))
+
+    tilescount = get_nb_tiles()
+    nbTilesBlocks = int(tilescount / blockTilesSize)
+
+    # run_pool returns the number of blocks (and hence CSV files) written.
+    tilesFileCount = run_pool(tilescount, nbTilesBlocks, blockTilesSize,
+                              nbParBlock, start_tiles)
+
+    nodescount = get_nb_nodes()
+    nbNodesBlocks = int(nodescount / blockNodesSize)
+
+    nodeFilesCount = run_pool(nodescount, nbNodesBlocks, blockNodesSize,
+                              nbParBlock, start_nodes)
+
+    print("Generating final csv files")
+
+    # Concatenate the per-block tile and node CSVs under a single header.
+    with open("root.csv", "w") as wfd:
+        wfd.write("filetype,subtype,filename\n")
+        for j in range(0, tilesFileCount):
+            ftiles = "root_" + str(j) + ".csv"
+            with open(ftiles, "r") as fd:
+                shutil.copyfileobj(fd, wfd)
+        for j in range(0, nodeFilesCount):
+            fnodes = "root_node_" + str(j) + ".csv"
+            with open(fnodes, "r") as fd:
+                shutil.copyfileobj(fd, wfd)
+
+    print("Work done !")
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
diff --git a/utils/node_lookup.py b/utils/node_lookup.py
new file mode 100644
index 0000000..c8669f4
--- /dev/null
+++ b/utils/node_lookup.py
@@ -0,0 +1,112 @@
+import sqlite3
+import progressbar
+import pyjson5 as json5
+import os.path
+
+
+def create_tables(conn):
+    """ Create the tile/node/wire schema on an empty SQLite connection.
+
+    A wire row references both the node it belongs to and the tile that
+    contains it.
+    """
+    c = conn.cursor()
+
+    c.execute(
+        """CREATE TABLE tile(
+    pkey INTEGER PRIMARY KEY,
+    name TEXT
+    );""")
+    c.execute(
+        """CREATE TABLE node(
+    pkey INTEGER PRIMARY KEY,
+    name TEXT
+    );""")
+    c.execute(
+        """CREATE TABLE wire(
+    pkey INTEGER PRIMARY KEY,
+    name TEXT,
+    node_pkey INTEGER,
+    tile_pkey INTEGER,
+    FOREIGN KEY(node_pkey) REFERENCES node(pkey),
+    FOREIGN KEY(tile_pkey) REFERENCES tile(pkey)
+    );""")
+
+    conn.commit()
+
+
+class NodeLookup(object):
+    """ SQLite-backed lookup from nodes and tiles to their wires. """
+
+    def __init__(self, database):
+        # Opens (and creates if missing) the SQLite database file.
+        self.conn = sqlite3.connect(database)
+
+    def build_database(self, nodes, tiles):
+        """ Populate the database from node dumps and tile file lists.
+
+        nodes: iterable of paths to node JSON5 dump files.
+        tiles: dict of tile type -> list of tile JSON5 file paths.
+        """
+        create_tables(self.conn)
+
+        c = self.conn.cursor()
+        tile_names = []
+        for tile_type in tiles:
+            for tile in tiles[tile_type]:
+                tile_names.append(tile)
+
+        tile_pkeys = {}
+        for tile_file in progressbar.progressbar(tile_names):
+            # Tile name is embedded in the filename, e.g.
+            # build/specimen_001/tile_DSP_L_X34Y145.json5
+            root, _ = os.path.splitext(os.path.basename(tile_file))
+            # Strip the leading 'tile_' from the basename.
+            tile = root[5:]
+            c.execute("INSERT INTO tile(name) VALUES (?);", (tile, ))
+            tile_pkeys[tile] = c.lastrowid
+
+        nodes_processed = set()
+        for node in progressbar.progressbar(nodes):
+            with open(node) as f:
+                node_wires = json5.load(f)
+                # Each node must appear in exactly one dump file.
+                assert node_wires['node'] not in nodes_processed
+                nodes_processed.add(node_wires['node'])
+
+                c.execute(
+                    "INSERT INTO node(name) VALUES (?);",
+                    (node_wires['node'], ))
+                node_pkey = c.lastrowid
+
+                for wire in node_wires['wires']:
+                    # Wire names are '<tile>/<wire>'.
+                    tile = wire['wire'].split('/')[0]
+
+                    tile_pkey = tile_pkeys[tile]
+                    c.execute(
+                        """
+INSERT INTO wire(name, tile_pkey, node_pkey) VALUES (?, ?, ?);""",
+                        (wire['wire'], tile_pkey, node_pkey))
+
+        self.conn.commit()
+
+        # Indices are created after the bulk insert for speed.
+        c = self.conn.cursor()
+        c.execute("CREATE INDEX tile_names ON tile(name);")
+        c.execute("CREATE INDEX node_names ON node(name);")
+        c.execute("CREATE INDEX wire_node_tile ON wire(node_pkey, tile_pkey);")
+        c.execute("CREATE INDEX wire_tile ON wire(tile_pkey);")
+        self.conn.commit()
+
+    def site_pin_node_to_wires(self, tile, node):
+        """ Yield tile-relative names of node's wires within tile.
+
+        Yields nothing when node is None.
+        """
+        if node is None:
+            return
+
+        c = self.conn.cursor()
+        c.execute(
+            """
+WITH
+    the_tile(tile_pkey) AS (SELECT pkey AS tile_pkey FROM tile WHERE name = ?),
+    the_node(node_pkey) AS (SELECT pkey AS node_pkey FROM node WHERE name = ?)
+SELECT wire.name FROM wire
+    INNER JOIN the_tile ON the_tile.tile_pkey = wire.tile_pkey
+    INNER JOIN the_node ON the_node.node_pkey = wire.node_pkey;
+""", (tile, node))
+
+        for row in c:
+            # Strip the '<tile>/' prefix.
+            yield row[0][len(tile) + 1:]
+
+    def wires_for_tile(self, tile):
+        """ Yield the tile-relative names of all wires in tile. """
+        c = self.conn.cursor()
+        c.execute(
+            """
+WITH
+    the_tile(tile_pkey) AS (SELECT pkey AS tile_pkey FROM tile WHERE name = ?)
+SELECT wire.name FROM wire
+    INNER JOIN the_tile ON the_tile.tile_pkey = wire.tile_pkey;
+""", (tile, ))
+        for row in c:
+            # Strip the '<tile>/' prefix.
+            yield row[0][len(tile) + 1:]