Merge branch 'master' of https://github.com/YosysHQ/prjtrellis into facade
diff --git a/diamond.sh b/diamond.sh
index 6a69b2e..d54ee31 100755
--- a/diamond.sh
+++ b/diamond.sh
@@ -58,7 +58,7 @@
 else
 	export LD_LIBRARY_PATH="${bindir}:${fpgabindir}"
 fi
-export LM_LICENSE_FILE="${diamonddir}/license/license.dat"
+export LM_LICENSE_FILE="${LM_LICENSE_FILE:=${diamonddir}/license/license.dat}"
 
 set -ex
 if [[ $2 == *.ncl ]]
diff --git a/diamond_tcl.sh b/diamond_tcl.sh
index 0bbb4f6..36db79e 100755
--- a/diamond_tcl.sh
+++ b/diamond_tcl.sh
@@ -41,7 +41,7 @@
 else
 	export LD_LIBRARY_PATH="${bindir}:${fpgabindir}"
 fi
-export LM_LICENSE_FILE="${diamonddir}/license/license.dat"
+export LM_LICENSE_FILE="${LM_LICENSE_FILE:=${diamonddir}/license/license.dat}"
 
 if $WINDOWS; then
     $FOUNDRY/userware/NT/bin/nt64/ispTcl $1
diff --git a/docs/.gitignore b/docs/.gitignore
index 13856a1..e35d885 100644
--- a/docs/.gitignore
+++ b/docs/.gitignore
@@ -1,2 +1 @@
-venv
 _build
diff --git a/docs/Makefile b/docs/Makefile
index cb6f107..b52c668 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -1,15 +1,13 @@
 # Minimal makefile for Sphinx documentation
 #
 
-MAKEDIR := $(dir $(lastword $(MAKEFILE_LIST)))
-
 # You can set these variables from the command line.
-SPHINXOPTS      =
-SPHINXBUILD     = [ -e venv/bin/activate ] && source venv/bin/activate; sphinx-build
-SPHINXAUTOBUILD = [ -e venv/bin/activate ] && source venv/bin/activate; sphinx-autobuild
-SPHINXPROJ      = ProjectTrellis
-SOURCEDIR       = .
-BUILDDIR        = _build
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+SPHINXAUTOBUILD   = sphinx-autobuild
+SPHINXPROJ    = ProjectTrellis
+SOURCEDIR     = .
+BUILDDIR      = _build
 
 # Put it first so that "make" without argument is like "make help".
 help:
@@ -18,21 +16,9 @@
 livehtml:
 	@$(SPHINXAUTOBUILD) -b html --ignore \*.swp --ignore \*~ $(SPHINXOPTS) "$(SOURCEDIR)" "$(BUILDDIR)/html"
 
-.PHONY: help livehtml Makefile
-
-venv:
-	rm -rf venv
-	virtualenv --python=python3 venv
-	source venv/bin/activate; pip install -r requirements.txt
-
-.PHONY: venv
-
-links:
-	@true
-
-.PHONY: links
+.PHONY: help livehtml Makefile
 
 # Catch-all target: route all unknown targets to Sphinx using the new
 # "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile links
+%: Makefile
 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/_static/.keepme b/docs/_static/.keepme
deleted file mode 100644
index e69de29..0000000
--- a/docs/_static/.keepme
+++ /dev/null
diff --git a/docs/conf.py b/docs/conf.py
index e847b58..8989469 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -24,7 +24,7 @@
 import os
 import sys
 sys.path.insert(0, os.path.abspath('.'))
-from markdown_code_symlinks import LinkParser, MarkdownSymlinksDomain
+from markdown_code_symlinks import MarkdownCodeSymlinks
 
 # -- General configuration ------------------------------------------------
 
@@ -36,13 +36,8 @@
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.autosummary',
-    'sphinx.ext.doctest',
-    'sphinx.ext.imgmath',
-    'sphinx.ext.napoleon',
-    'sphinx.ext.todo',
-    'sphinx_markdown_tables',
+    'sphinx.ext.imgmath', 'sphinx.ext.autodoc', 'sphinx.ext.doctest',
+    'sphinx.ext.autosummary', 'sphinx.ext.napoleon', 'sphinx.ext.todo'
 ]
 
 # Add any paths that contain templates here, relative to this directory.
@@ -52,7 +47,7 @@
 # You can specify multiple suffix as a list of string:
 source_suffix = ['.rst', '.md']
 source_parsers = {
-    '.md': 'markdown_code_symlinks.LinkParser',
+    '.md': 'recommonmark.parser.CommonMarkParser',
 }
 
 # The master toctree document.
@@ -63,24 +58,6 @@
 copyright = u'2018, SymbiFlow Team'
 author = u'SymbiFlow Team'
 
-# Enable github links when not on readthedocs
-on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
-if not on_rtd:
-    html_context = {
-        "display_github": True,  # Integrate GitHub
-        "github_user": "symbiflow",  # Username
-        "github_repo": "prjtrellis",  # Repo name
-        "github_version": "master",  # Version
-        "conf_py_path": "/doc/",
-    }
-else:
-    docs_dir = os.path.abspath(os.path.dirname(__file__))
-    print("Docs dir is:", docs_dir)
-    import subprocess
-    subprocess.call('git fetch origin --unshallow', cwd=docs_dir, shell=True)
-    subprocess.check_call('git fetch origin --tags', cwd=docs_dir, shell=True)
-    subprocess.check_call('make links', cwd=docs_dir, shell=True)
-
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
@@ -100,10 +77,10 @@
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'venv', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'default'
+pygments_style = 'sphinx'
 
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = True
@@ -113,67 +90,24 @@
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'sphinx_materialdesign_theme'
+html_theme = 'sphinx_rtd_theme'
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
 # documentation.
 #
-html_theme_options = {
-    # Specify a list of menu in Header.
-    # Tuples forms:
-    #  ('Name', 'external url or path of pages in the document', boolean, 'icon name')
-    #
-    # Third argument:
-    # True indicates an external link.
-    # False indicates path of pages in the document.
-    #
-    # Fourth argument:
-    # Specify the icon name.
-    # For details see link.
-    # https://material.io/icons/
-    'header_links': [
-        ('Home', 'index', False, 'home'),
-        ("GitHub", "https://github.com/SymbiFlow/prjtrellis", True, 'link')
-    ],
+# html_theme_options = {}
 
-    # Customize css colors.
-    # For details see link.
-    # https://getmdl.io/customize/index.html
-    #
-    # Values: amber, blue, brown, cyan deep_orange, deep_purple, green, grey, indigo, light_blue,
-    #         light_green, lime, orange, pink, purple, red, teal, yellow(Default: indigo)
-    'primary_color':
-    'deep_purple',
-    # Values: Same as primary_color. (Default: pink)
-    'accent_color':
-    'purple',
-
-    # Customize layout.
-    # For details see link.
-    # https://getmdl.io/components/index.html#layout-section
-    'fixed_drawer':
-    True,
-    'fixed_header':
-    True,
-    'header_waterfall':
-    True,
-    'header_scroll':
-    False,
-
-    # Render title in header.
-    # Values: True, False (Default: False)
-    'show_header_title':
-    False,
-    # Render title in drawer.
-    # Values: True, False (Default: True)
-    'show_drawer_title':
-    True,
-    # Render footer.
-    # Values: True, False (Default: True)
-    'show_footer':
-    True
-}
+# Enable github links when not on readthedocs
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+if not on_rtd:
+    html_context = {
+        "display_github": True,  # Integrate GitHub
+        "github_user": "symbiflow",  # Username
+        "github_repo": "prjtrellis",  # Repo name
+        "github_version": "master",  # Version
+        "conf_py_path": "/docs/",
+    }
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
@@ -250,17 +184,9 @@
 
 
 def setup(app):
-    github_code_repo = 'https://github.com/SymbiFlow/prjtrellis/'
-    github_code_branch = 'blob/master/'
-
-    docs_root_dir = os.path.realpath(os.path.dirname(__file__))
-    code_root_dir = os.path.realpath(os.path.join(docs_root_dir, ".."))
-
-    MarkdownSymlinksDomain.init_domain(
-        github_code_repo, github_code_branch, docs_root_dir, code_root_dir)
-    MarkdownSymlinksDomain.find_links()
-    app.add_domain(MarkdownSymlinksDomain)
+    MarkdownCodeSymlinks.find_links()
     app.add_config_value(
         'recommonmark_config', {
-            'github_code_repo': github_code_repo,
+            'github_code_repo': 'https://github.com/SymbiFlow/prjtrellis',
         }, True)
+    app.add_transform(MarkdownCodeSymlinks)
diff --git a/docs/markdown_code_symlinks.py b/docs/markdown_code_symlinks.py
new file mode 100644
index 0000000..1955493
--- /dev/null
+++ b/docs/markdown_code_symlinks.py
@@ -0,0 +1,179 @@
+import logging
+import os
+
+from recommonmark import transform
+"""
+Allow linking of Markdown documentation from the source code tree into the Sphinx
+documentation tree.
+
+The Markdown documents will have links relative to the source code root, rather
+than the place they are now linked to - this code fixes these paths up.
+
+We also want links from two Markdown documents found in the Sphinx docs to
+work, so that is also fixed up.
+"""
+
+
+def path_contains(parent_path, child_path):
+    """Check a path contains another path.
+
+    >>> path_contains("a/b", "a/b")
+    True
+    >>> path_contains("a/b", "a/b/")
+    True
+    >>> path_contains("a/b", "a/b/c")
+    True
+    >>> path_contains("a/b", "c")
+    False
+    >>> path_contains("a/b", "c/b")
+    False
+    >>> path_contains("a/b", "c/../a/b/d")
+    True
+    >>> path_contains("../a/b", "../a/b/d")
+    True
+    >>> path_contains("../a/b", "../a/c")
+    False
+    >>> path_contains("a", "abc")
+    False
+    >>> path_contains("aa", "abc")
+    False
+    """
+    # Append a separator to the end of both paths to work around the fact that
+    # os.path.commonprefix does character by character comparisons rather than
+    # path segment by path segment.
+    parent_path = os.path.join(os.path.normpath(parent_path), '')
+    child_path = os.path.join(os.path.normpath(child_path), '')
+    common_path = os.path.commonprefix((parent_path, child_path))
+    return common_path == parent_path
+
+
+def relative(parent_dir, child_path):
+    """Get the relative between a path that contains another path."""
+    child_dir = os.path.dirname(child_path)
+    assert path_contains(parent_dir, child_dir), "{} not inside {}".format(
+        child_path, parent_dir)
+    return os.path.relpath(child_path, start=parent_dir)
+
+
+class MarkdownCodeSymlinks(transform.AutoStructify, object):
+    docs_root_dir = os.path.realpath(os.path.dirname(__file__))
+    code_root_dir = os.path.realpath(os.path.join(docs_root_dir, "..", ".."))
+
+    mapping = {
+        'docs2code': {},
+        'code2docs': {},
+    }
+
+    @classmethod
+    def relative_code(cls, url):
+        """Get a value relative to the code directory."""
+        return relative(cls.code_root_dir, url)
+
+    @classmethod
+    def relative_docs(cls, url):
+        """Get a value relative to the docs directory."""
+        return relative(cls.docs_root_dir, url)
+
+    @classmethod
+    def add_mapping(cls, docs_rel, code_rel):
+        assert docs_rel not in cls.mapping['docs2code'], """\
+Assertion error! Document already in mapping!
+    New Value: {}
+Current Value: {}
+""".format(docs_rel, cls.mapping['docs2code'][docs_rel])
+        assert code_rel not in cls.mapping['code2docs'], """\
+Assertion error! Document already in mapping!
+    New Value: {}
+Current Value: {}
+""".format(code_rel, cls.mapping['code2docs'][code_rel])
+
+        cls.mapping['docs2code'][docs_rel] = code_rel
+        cls.mapping['code2docs'][code_rel] = docs_rel
+
+    @classmethod
+    def find_links(cls):
+        """Walk the docs dir and find links to docs in the code dir."""
+        for root, dirs, files in os.walk(cls.docs_root_dir):
+            for fname in files:
+                fpath = os.path.abspath(os.path.join(root, fname))
+
+                if not os.path.islink(fpath):
+                    continue
+
+                link_path = os.path.join(root, os.readlink(fpath))
+                # Is link outside the code directory?
+                if not path_contains(cls.code_root_dir, link_path):
+                    continue
+
+                # Is link internal to the docs directory?
+                if path_contains(cls.docs_root_dir, link_path):
+                    continue
+
+                docs_rel = cls.relative_docs(fpath)
+                code_rel = cls.relative_code(link_path)
+
+                cls.add_mapping(docs_rel, code_rel)
+        import pprint
+        pprint.pprint(cls.mapping)
+
+    @property
+    def url_resolver(self):
+        return self._url_resolver
+
+    @url_resolver.setter
+    def url_resolver(self, value):
+        print(self, value)
+
+    # Resolve a link from one markdown to another document.
+    def _url_resolver(self, ourl):
+        """Resolve a URL found in a markdown file."""
+        assert self.docs_root_dir == os.path.realpath(self.root_dir), """\
+Configuration error! Document Root != Current Root
+Document Root: {}
+ Current Root: {}
+""".format(self.docs_root_dir, self.root_dir)
+
+        src_path = os.path.abspath(self.document['source'])
+        src_dir = os.path.dirname(src_path)
+        dst_path = os.path.abspath(os.path.join(self.docs_root_dir, ourl))
+        dst_rsrc = os.path.relpath(dst_path, start=src_dir)
+
+        src_rdoc = self.relative_docs(src_path)
+
+        print()
+        print("url_resolver")
+        print(src_path)
+        print(dst_path)
+        print(dst_rsrc)
+        print(src_rdoc)
+
+        # Is the source document a linked one?
+        if src_rdoc not in self.mapping['docs2code']:
+            # Don't do any rewriting on non-linked markdown.
+            url = ourl
+
+        # Is the destination also inside docs?
+        elif dst_rsrc not in self.mapping['code2docs']:
+            # Return a path to the GitHub repo.
+            url = "{}/blob/master/{}".format(
+                self.config['github_code_repo'], dst_rsrc)
+        else:
+            url = os.path.relpath(
+                os.path.join(
+                    self.docs_root_dir, self.mapping['code2docs'][dst_rsrc]),
+                start=src_dir)
+            base_url, ext = os.path.splitext(url)
+            assert ext in (".md",
+                           ".markdown"), ("Unknown extension {}".format(ext))
+            url = "{}.html".format(base_url)
+
+        print("---")
+        print(ourl)
+        print(url)
+        print()
+        return url
+
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
diff --git a/docs/requirements.txt b/docs/requirements.txt
index ec9e872..56c1f78 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,13 +1,8 @@
-sphinx_materialdesign_theme
-
 docutils
 sphinx
 sphinx-autobuild
 
 breathe
 recommonmark
-sphinx-markdown-tables
+sphinx_rtd_theme
 sphinxcontrib-napoleon
-
-# Markdown cross-reference solver library
-git+https://github.com/SymbiFlow/sphinxcontrib-markdown-symlinks
diff --git a/examples/picorv32_tinyfpga/attosoc.v b/examples/picorv32_tinyfpga/attosoc.v
index 0b62a75..80b376f 100644
--- a/examples/picorv32_tinyfpga/attosoc.v
+++ b/examples/picorv32_tinyfpga/attosoc.v
@@ -1,8 +1,8 @@
 /*
  *  ECP5 PicoRV32 demo
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
- *  Copyright (C) 2018  David Shah <dave@ds0.me>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
+ *  Copyright (C) 2018  gatecat <gatecat@ds0.me>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/picorv32_tinyfpga/picorv32.v b/examples/picorv32_tinyfpga/picorv32.v
index af634b4..35a8c74 100644
--- a/examples/picorv32_tinyfpga/picorv32.v
+++ b/examples/picorv32_tinyfpga/picorv32.v
@@ -1,7 +1,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/picorv32_ulx3s/attosoc.v b/examples/picorv32_ulx3s/attosoc.v
index 4921e29..39d5143 100644
--- a/examples/picorv32_ulx3s/attosoc.v
+++ b/examples/picorv32_ulx3s/attosoc.v
@@ -1,8 +1,8 @@
 /*
  *  ECP5 PicoRV32 demo
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
- *  Copyright (C) 2018  David Shah <dave@ds0.me>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
+ *  Copyright (C) 2018  gatecat <gatecat@ds0.me>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/picorv32_ulx3s/picorv32.v b/examples/picorv32_ulx3s/picorv32.v
index af634b4..35a8c74 100644
--- a/examples/picorv32_ulx3s/picorv32.v
+++ b/examples/picorv32_ulx3s/picorv32.v
@@ -1,7 +1,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/picorv32_versa5g/attosoc.v b/examples/picorv32_versa5g/attosoc.v
index 4921e29..39d5143 100644
--- a/examples/picorv32_versa5g/attosoc.v
+++ b/examples/picorv32_versa5g/attosoc.v
@@ -1,8 +1,8 @@
 /*
  *  ECP5 PicoRV32 demo
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
- *  Copyright (C) 2018  David Shah <dave@ds0.me>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
+ *  Copyright (C) 2018  gatecat <gatecat@ds0.me>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/picorv32_versa5g/picorv32.v b/examples/picorv32_versa5g/picorv32.v
index af634b4..35a8c74 100644
--- a/examples/picorv32_versa5g/picorv32.v
+++ b/examples/picorv32_versa5g/picorv32.v
@@ -1,7 +1,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/soc_ecp5_evn/attosoc.v b/examples/soc_ecp5_evn/attosoc.v
index 5761328..8fa54ae 100644
--- a/examples/soc_ecp5_evn/attosoc.v
+++ b/examples/soc_ecp5_evn/attosoc.v
@@ -1,8 +1,8 @@
 /*
  *  ECP5 PicoRV32 demo
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
- *  Copyright (C) 2018  David Shah <dave@ds0.me>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
+ *  Copyright (C) 2018  gatecat <gatecat@ds0.me>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/soc_ecp5_evn/picorv32.v b/examples/soc_ecp5_evn/picorv32.v
index af634b4..35a8c74 100644
--- a/examples/soc_ecp5_evn/picorv32.v
+++ b/examples/soc_ecp5_evn/picorv32.v
@@ -1,7 +1,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/soc_ecp5_evn/simpleuart.v b/examples/soc_ecp5_evn/simpleuart.v
index 50808cb..eaff021 100644
--- a/examples/soc_ecp5_evn/simpleuart.v
+++ b/examples/soc_ecp5_evn/simpleuart.v
@@ -1,7 +1,7 @@
 /*
  *  PicoSoC - A simple example SoC using PicoRV32
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/soc_versa5g/attosoc.v b/examples/soc_versa5g/attosoc.v
index 0cc153d..48280b1 100644
--- a/examples/soc_versa5g/attosoc.v
+++ b/examples/soc_versa5g/attosoc.v
@@ -1,8 +1,8 @@
 /*
  *  ECP5 PicoRV32 demo
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
- *  Copyright (C) 2018  David Shah <dave@ds0.me>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
+ *  Copyright (C) 2018  gatecat <gatecat@ds0.me>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/soc_versa5g/picorv32.v b/examples/soc_versa5g/picorv32.v
index af634b4..35a8c74 100644
--- a/examples/soc_versa5g/picorv32.v
+++ b/examples/soc_versa5g/picorv32.v
@@ -1,7 +1,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/examples/soc_versa5g/simpleuart.v b/examples/soc_versa5g/simpleuart.v
index 50808cb..eaff021 100644
--- a/examples/soc_versa5g/simpleuart.v
+++ b/examples/soc_versa5g/simpleuart.v
@@ -1,7 +1,7 @@
 /*
  *  PicoSoC - A simple example SoC using PicoRV32
  *
- *  Copyright (C) 2017  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2017  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/libtrellis/CMakeLists.txt b/libtrellis/CMakeLists.txt
index d838d44..21b2ace 100644
--- a/libtrellis/CMakeLists.txt
+++ b/libtrellis/CMakeLists.txt
@@ -168,4 +168,6 @@
 install(DIRECTORY ../database DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis PATTERN ".git" EXCLUDE)
 install(DIRECTORY ../misc DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis)
 install(DIRECTORY ../util/common DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis/util)
-install(DIRECTORY ../timing/util DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis/timing USE_SOURCE_PERMISSIONS)
+install(DIRECTORY ../timing/util DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis/timing)
+install(PROGRAMS ../timing/util/cell_html.py DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis/timing/util)
+install(PROGRAMS ../timing/util/cell_timings.py DESTINATION ${CMAKE_INSTALL_DATADIR}/${PROGRAM_PREFIX}trellis/timing/util)
diff --git a/libtrellis/include/DatabasePath.hpp b/libtrellis/include/DatabasePath.hpp
index 288f670..8134323 100644
--- a/libtrellis/include/DatabasePath.hpp
+++ b/libtrellis/include/DatabasePath.hpp
@@ -32,7 +32,7 @@
 /*
  *  yosys -- Yosys Open SYnthesis Suite
  *
- *  Copyright (C) 2012  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2012  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/libtrellis/src/PyTrellis.cpp b/libtrellis/src/PyTrellis.cpp
index 38ea056..674f3fa 100644
--- a/libtrellis/src/PyTrellis.cpp
+++ b/libtrellis/src/PyTrellis.cpp
@@ -250,6 +250,7 @@
 
     // From BitDatabase.cpp
     class_<ConfigBit>(m, "ConfigBit")
+            .def(init<>())
             .def_readwrite("frame", &ConfigBit::frame)
             .def_readwrite("bit", &ConfigBit::bit)
             .def_readwrite("inv", &ConfigBit::inv);
@@ -260,9 +261,11 @@
         .def("__len__", [](const std::set<ConfigBit> &v) { return v.size(); })
         .def("__iter__", [](std::set<ConfigBit> &v) {
             return py::make_iterator(v.begin(), v.end());
-        }, py::keep_alive<0, 1>()); /* Keep vector alive while iterator is used */
+        }, py::keep_alive<0, 1>()) /* Keep vector alive while iterator is used */
+        .def("add", [](std::set<ConfigBit> &v, const ConfigBit& value) { v.insert(value); });
 
     class_<BitGroup>(m, "BitGroup")
+            .def(init<>())
             .def(init<const CRAMDelta &>())
             .def_readwrite("bits", &BitGroup::bits)
             .def("match", &BitGroup::match)
@@ -273,6 +276,7 @@
     py::bind_vector<vector<BitGroup>>(m, "BitGroupVector");
 
     class_<ArcData>(m, "ArcData")
+            .def(init<>())
             .def_readwrite("source", &ArcData::source)
             .def_readwrite("sink", &ArcData::sink)
             .def_readwrite("bits", &ArcData::bits);
@@ -287,6 +291,7 @@
             .def("set_driver", &MuxBits::set_driver);
 
     class_<WordSettingBits>(m, "WordSettingBits")
+            .def(init<>())
             .def_readwrite("name", &WordSettingBits::name)
             .def_readwrite("bits", &WordSettingBits::bits)
             .def_readwrite("defval", &WordSettingBits::defval)
@@ -296,6 +301,7 @@
     py::bind_map<map<string, BitGroup>>(m, "BitGroupMap");
 
     class_<EnumSettingBits>(m, "EnumSettingBits")
+            .def(init<>())
             .def_readwrite("name", &EnumSettingBits::name)
             .def_readwrite("options", &EnumSettingBits::options)
             .def("get_options", &EnumSettingBits::get_options)
@@ -356,6 +362,7 @@
     py::bind_vector<vector<ConfigUnknown>>(m, "ConfigUnknownVector");
 
     class_<TileConfig>(m, "TileConfig")
+            .def(init<>())
             .def_readwrite("carcs", &TileConfig::carcs)
             .def_readwrite("cwords", &TileConfig::cwords)
             .def_readwrite("cenums", &TileConfig::cenums)
diff --git a/libtrellis/tools/ecpbram.cpp b/libtrellis/tools/ecpbram.cpp
index 7e424f1..0873d33 100644
--- a/libtrellis/tools/ecpbram.cpp
+++ b/libtrellis/tools/ecpbram.cpp
@@ -1,5 +1,5 @@
 //
-//  Copyright (C) 2016  Clifford Wolf <clifford@clifford.at>
+//  Copyright (C) 2016  Claire Xenia Wolf <claire@yosyshq.com>
 //  Copyright (C) 2019  Sylvain Munaut <tnt@246tNt.com>
 //
 //  Permission to use, copy, modify, and/or distribute this software for any
@@ -22,7 +22,7 @@
 #include <stdint.h>
 #ifdef _WIN32
 #define NOMINMAX
-#include "Windows.h"
+#include "windows.h"
 #undef NOMINMAX
 #else
 #include <unistd.h>
@@ -166,7 +166,7 @@
         cerr << argv[0] << ": ECP5 BRAM content initialization tool" << endl;
         cerr << endl;
         cerr << "Copyright (C) 2019  Sylvain Munaut <tnt@246tNt.com>" << endl;
-        cerr << "Copyright (C) 2016  Clifford Wolf <clifford@clifford.at>" << endl;
+        cerr << "Copyright (C) 2016  Claire Xenia Wolf <claire@yosyshq.com>" << endl;
         cerr << endl;
         cerr << options << endl;
         return vm.count("help") ? 0 : 1;
diff --git a/libtrellis/tools/ecppack.cpp b/libtrellis/tools/ecppack.cpp
index f6bf902..ab560e0 100644
--- a/libtrellis/tools/ecppack.cpp
+++ b/libtrellis/tools/ecppack.cpp
@@ -85,7 +85,7 @@
         cerr << "Version " << git_describe_str << endl;
         cerr << argv[0] << ": ECP5 bitstream packer" << endl;
         cerr << endl;
-        cerr << "Copyright (C) 2018 David Shah <david@symbioticeda.com>" << endl;
+        cerr << "Copyright (C) 2018 gatecat <gatecat@ds0.me>" << endl;
         cerr << endl;
         cerr << "Usage: " << argv[0] << " input.config [output.bit] [options]" << endl;
         cerr << options << endl;
diff --git a/libtrellis/tools/ecppll.cpp b/libtrellis/tools/ecppll.cpp
index 3764645..91a1aea 100644
--- a/libtrellis/tools/ecppll.cpp
+++ b/libtrellis/tools/ecppll.cpp
@@ -130,7 +130,7 @@
     cerr << endl;
     cerr << "This tool is experimental! Use at your own risk!" << endl;
     cerr << endl;
-    cerr << "Copyright (C) 2018-2019 David Shah <david@symbioticeda.com>" << endl;
+    cerr << "Copyright (C) 2018-2019 gatecat <gatecat@ds0.me>" << endl;
     cerr << endl;
     cerr << options << endl;
     return vm.count("help") ? 0 : 1;
@@ -390,7 +390,7 @@
 
   file << "    output locked\n";
   file << ");\n";
-  if(params.internal_feedback)
+  if(params.internal_feedback || params.mode == pll_mode::HIGHRES)
     file << "wire clkfb;\n";
   if(params.dynamic)
   {
@@ -398,7 +398,10 @@
     file << "assign phasesel_hw = phasesel - 1;\n";
   }
   file << "(* FREQUENCY_PIN_CLKI=\"" << params.clkin_frequency << "\" *)\n";
-  file << "(* FREQUENCY_PIN_CLKOP=\"" << params.fout << "\" *)\n";
+
+  if(params.mode != pll_mode::HIGHRES)
+    file << "(* FREQUENCY_PIN_CLKOP=\"" << params.fout << "\" *)\n";
+
   if(params.secondary[0].enabled)
     file << "(* FREQUENCY_PIN_CLKOS=\"" << params.secondary[0].freq << "\" *)\n";
   if(params.secondary[1].enabled)
@@ -453,7 +456,12 @@
   else
     file << "        .STDBY(1'b0),\n";
   file << "        .CLKI(" << params.clkin_name << "),\n";
-  file << "        .CLKOP(" << params.clkout0_name << "),\n";
+
+  if(params.mode == pll_mode::HIGHRES)
+    file << "        .CLKOP(clkfb),\n";
+  else
+    file << "        .CLKOP(" << params.clkout0_name << "),\n";
+
   if(params.secondary[0].enabled){
     if(params.mode == pll_mode::HIGHRES)
       file << "        .CLKOS(" << params.clkout0_name << "),\n";
@@ -466,16 +474,17 @@
   if(params.secondary[2].enabled){
     file << "        .CLKOS3(" << params.secondary[2].name << "),\n";
   }
-  if(params.internal_feedback)
-  {
+
+  if(params.internal_feedback || params.mode == pll_mode::HIGHRES)
     file << "        .CLKFB(clkfb),\n";
-    file << "        .CLKINTFB(clkfb),\n";
-  }
   else
-  {
     file << "        .CLKFB(" <<  params.feedback_wname[params.feedback_clkout] << "),\n";
+
+  if(params.internal_feedback)
+    file << "        .CLKINTFB(clkfb),\n";
+  else
     file << "        .CLKINTFB(),\n";
-  }
+
   if(params.dynamic)
   {
     file << "        .PHASESEL0(phasesel_hw[0]),\n";
diff --git a/libtrellis/tools/ecpunpack.cpp b/libtrellis/tools/ecpunpack.cpp
index 1029356..5c9365b 100644
--- a/libtrellis/tools/ecpunpack.cpp
+++ b/libtrellis/tools/ecpunpack.cpp
@@ -54,7 +54,7 @@
         cerr << "Version " << git_describe_str << endl;
         cerr << argv[0] << ": ECP5 bitstream to text config converter" << endl;
         cerr << endl;
-        cerr << "Copyright (C) 2018 David Shah <david@symbioticeda.com>" << endl;
+        cerr << "Copyright (C) 2018 gatecat <gatecat@ds0.me>" << endl;
         cerr << endl;
         cerr << "Usage: " << argv[0] << " input.bit [output.config] [options]" << endl;
         cerr << options << endl;
diff --git a/timing/resource/picorv32_large.v b/timing/resource/picorv32_large.v
index 7a5659f..5ad56e1 100644
--- a/timing/resource/picorv32_large.v
+++ b/timing/resource/picorv32_large.v
@@ -52,7 +52,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/timing/resource/picorv32_x20.v b/timing/resource/picorv32_x20.v
index bb76434..20c4283 100644
--- a/timing/resource/picorv32_x20.v
+++ b/timing/resource/picorv32_x20.v
@@ -64,7 +64,7 @@
 /*
  *  PicoRV32 -- A Small RISC-V (RV32I) Processor Core
  *
- *  Copyright (C) 2015  Clifford Wolf <clifford@clifford.at>
+ *  Copyright (C) 2015  Claire Xenia Wolf <claire@yosyshq.com>
  *
  *  Permission to use, copy, modify, and/or distribute this software for any
  *  purpose with or without fee is hereby granted, provided that the above
diff --git a/tools/ecp_vlog.py b/tools/ecp_vlog.py
new file mode 100644
index 0000000..f125770
--- /dev/null
+++ b/tools/ecp_vlog.py
@@ -0,0 +1,1046 @@
+import os
+import re
+import sys
+from collections import defaultdict
+from dataclasses import dataclass, field
+from functools import lru_cache
+from typing import Callable, ClassVar, Dict, List, Optional, Set, Tuple, Type
+
+try:
+    # optional import to get natural sorting of integers (e.g. 1, 5, 9, 10 instead of 1, 10, 5, 9)
+    from natsort import natsorted
+except ImportError:
+    natsorted = sorted
+
+import pytrellis
+import database
+
+
+# Conversions between tiles and locations
+@dataclass
+class TileData:
+    tile: pytrellis.Tile
+    cfg: pytrellis.TileConfig
+
+
+Location = Tuple[int, int]  # pytrellis.Location cannot be used as a dictionary key
+TilesByLoc = Dict[Location, List[TileData]]
+
+
+def make_tiles_by_loc(chip: pytrellis.Chip) -> TilesByLoc:
+    tiles_by_loc: TilesByLoc = defaultdict(list)
+
+    for tilename, tile in chip.tiles.items():
+        locator = pytrellis.TileLocator(chip.info.family, chip.info.name, tile.info.type)
+        tilebitdb = pytrellis.get_tile_bitdata(locator)
+        tilecfg = tilebitdb.tile_cram_to_config(tile.cram)
+
+        rc = tile.info.get_row_col()
+        row, col = rc.first, rc.second
+        tileloc = pytrellis.Location(col, row)
+
+        tiles_by_loc[tileloc.x, tileloc.y].append(TileData(tile, tilecfg))
+
+    return tiles_by_loc
+
+
+# Utility classes representing a graph of configured connections
+@dataclass(eq=True, order=True, frozen=True)
+class Ident:
+    """ An identifier in the routing graph """
+
+    # place label first so we sort by identifier
+    label: str = field(compare=False)
+    # Idents are unique by ID so we only need to compare IDs
+    id: int = field(repr=False)
+    # Having a cache for Ident objects reduces memory pressure,
+    # speeds up Ident creation slightly, and significantly reduces
+    # the size of pickled graphs.
+    _cache: ClassVar[Dict[int, "Ident"]] = {}
+
+    @classmethod
+    def from_id(cls, rgraph: pytrellis.RoutingGraph, id: int) -> "Ident":
+        if id in cls._cache:
+            return cls._cache[id]
+        inst = Ident(rgraph.to_str(id), id)
+        cls._cache[id] = inst
+        return inst
+
+    @classmethod
+    def from_label(cls, rgraph: pytrellis.RoutingGraph, label: str) -> "Ident":
+        return cls.from_id(rgraph, rgraph.ident(label))
+
+    def __str__(self) -> str:
+        return self.label
+
+
+@dataclass(eq=True, order=True, frozen=True)
+class Node:
+    """ A node in the routing graph - either a wire or a BEL pin """
+
+    # put y first so we sort by row, then column
+    y: int
+    x: int
+    id: Ident
+    pin: Optional[Ident] = None
+    mod_name_map: ClassVar[Dict[str, str]] = {}
+
+    @property
+    def loc(self) -> pytrellis.Location:
+        return pytrellis.Location(self.x, self.y)
+
+    @property
+    def mod_name(self) -> str:
+        res = f"R{self.y}C{self.x}_{self.name}"
+        return res
+
+    @property
+    def name(self) -> str:
+        return self.id.label
+
+    @property
+    def pin_name(self) -> str:
+        if self.pin is None:
+            return ""
+        return self.pin.label
+
+    def __str__(self) -> str:
+        mod_name = self.mod_name
+        pin_name = self.pin_name
+        res = self.mod_name_map.get(mod_name, mod_name)
+        if pin_name:
+            res += "$" + pin_name
+        return res
+
+
+EdgeMap = Dict[Node, Set[Node]]
+
+
+@dataclass
+class Component:
+    graph: "ConnectionGraph"
+    nodes: Set[Node] = field(default_factory=set)
+
+    def get_roots(self) -> Set[Node]:
+        roots = set()
+        seen: Dict[Node, int] = {}
+
+        def visit(node: Node) -> None:
+            if node in seen:
+                if seen[node] == 0:
+                    print(f"Warning: node {node} is part of a cycle!", file=sys.stderr)
+                return
+            seen[node] = 0
+            if not self.graph.edges_rev[node]:
+                roots.add(node)
+            else:
+                for x in self.graph.edges_rev[node]:
+                    visit(x)
+            seen[node] = 1
+
+        for x in self.nodes:
+            visit(x)
+
+        return roots
+
+    def get_leaves(self) -> Set[Node]:
+        leaves = set()
+        seen: Dict[Node, int] = {}
+
+        def visit(node: Node) -> None:
+            if node in seen:
+                if seen[node] == 0:
+                    print(f"Warning: node {node} is part of a cycle!", file=sys.stderr)
+                return
+            seen[node] = 0
+            if not self.graph.edges_fwd[node]:
+                leaves.add(node)
+            else:
+                for x in self.graph.edges_fwd[node]:
+                    visit(x)
+            seen[node] = 1
+
+        for x in self.nodes:
+            visit(x)
+
+        return leaves
+
+
+@dataclass
+class ConnectionGraph:
+    """ A directed graph of Nodes. """
+
+    edges_fwd: EdgeMap = field(default_factory=lambda: defaultdict(set))
+    edges_rev: EdgeMap = field(default_factory=lambda: defaultdict(set))
+
+    def add_edge(self, source: Node, sink: Node) -> None:
+        self.edges_fwd[source].add(sink)
+        self.edges_rev[sink].add(source)
+
+    def get_components(self) -> List[Component]:
+        seen: Set[Node] = set()
+
+        def visit(node: Node, component: Component) -> None:
+            if node in seen:
+                return
+            seen.add(node)
+
+            component.nodes.add(node)
+            if node in self.edges_fwd:
+                for x in self.edges_fwd[node]:
+                    visit(x, component)
+            if node in self.edges_rev:
+                for x in self.edges_rev[node]:
+                    visit(x, component)
+
+        components: List[Component] = []
+        for edges in (self.edges_rev, self.edges_fwd):
+            for node in edges:
+                if node in seen:
+                    continue
+                component = Component(self)
+                visit(node, component)
+                components.append(component)
+
+        return components
+
+
+# Connection graph generation
+def gen_config_graph(chip: pytrellis.Chip, rgraph: pytrellis.RoutingGraph, tiles_by_loc: TilesByLoc) -> ConnectionGraph:
+    @lru_cache(None)
+    def get_zero_bit_arcs(chip: pytrellis.Chip, tiletype: str) -> Dict[str, List[str]]:
+        """Get configurable zero-bit arcs from the given tile.
+
+        tile_cram_to_config ignores zero-bit arcs when generating the TileConfig,
+        which means that if all bits are unset for a given mux, no connection is
+        generated at all."""
+        locator = pytrellis.TileLocator(chip.info.family, chip.info.name, tiletype)
+        tilebitdb = pytrellis.get_tile_bitdata(locator)
+        arcs: Dict[str, List[str]] = defaultdict(list)
+        for sink in tilebitdb.get_sinks():
+            mux_data = tilebitdb.get_mux_data_for_sink(sink)
+            for arc_name, arc_data in mux_data.arcs.items():
+                if len(arc_data.bits.bits) == 0:
+                    arcs[sink].append(arc_name)
+        return arcs
+
+    def bel_to_node(pos: Tuple[pytrellis.RoutingId, int]) -> Node:
+        rid, bel_pin = pos
+        id = Ident.from_id(rgraph, rid.id)
+        pin = Ident.from_id(rgraph, bel_pin)
+        return Node(x=rid.loc.x, y=rid.loc.y, id=id, pin=pin)
+
+    def wire_to_node(rid: pytrellis.RoutingId) -> Node:
+        id = Ident.from_id(rgraph, rid.id)
+        return Node(x=rid.loc.x, y=rid.loc.y, id=id)
+
+    def _get_enum_value(cfg: pytrellis.TileConfig, enum_name: str) -> Optional[str]:
+        for cenum in cfg.cenums:
+            if cenum.name == enum_name:
+                return cenum.value
+        return None
+
+    def _filter_data_pin(node: Node) -> bool:
+        # IOLOGIC[AC].[RT]XDATA[456] are mutually exclusive with IOLOGIC[BD].[RT]XDATA[0123],
+        # depending on whether 7:1 gearing is used, because 7:1 gearing occupies two adjacent
+        # IOLOGIC units (A+B or C+D). Because they're mutually exclusive, some of the pins are
+        # hardwired together (e.g. 4A and 0B). To avoid a multi-root situation and spurious
+        # inputs/outputs, we need to pick which set to include based on the IO configuration.
+
+        bel_id = node.mod_name[-1]
+        assert bel_id in "ABCD"
+        pin_id = node.pin_name[-1]
+        assert pin_id in "0123456"
+
+        if bel_id in "AC" and pin_id in "0123":
+            # These pins are unconflicted
+            return True
+
+        if bel_id in "AB":
+            tiles = tiles_by_loc[node.x, node.y]
+            main_mod = "IOLOGICA"
+        else:
+            # HACK: The IOLOGICC enums seem to be in the PIC[LR]2 tiles,
+            # which appear to always be exactly two tiles down from
+            # the PIC[LR]0 tiles where the actual pins are.
+            # This seems very fragile.
+            tiles = tiles_by_loc[node.x, node.y + 2]
+            main_mod = "IOLOGICC"
+
+        # Make sure we get the right tile on the tile location
+        for tiledata in tiles:
+            if any(site.type == main_mod for site in tiledata.tile.info.sites):
+                break
+        else:
+            print("error: could not locate IOLOGIC enums", file=sys.stderr)
+            return True
+
+        if node.pin_name.startswith("RX"):
+            is_71_mode = _get_enum_value(tiledata.cfg, main_mod + "IDDRXN.MODE") == "IDDR71"
+        else:
+            is_71_mode = _get_enum_value(tiledata.cfg, main_mod + "ODDRXN.MODE") == "ODDR71"
+
+        # Note that [456][BD] do not exist.
+        if pin_id in "456" and is_71_mode:
+            return True
+        elif pin_id in "0123" and not is_71_mode:
+            return True
+        return False
+
+    def add_edge(graph: ConnectionGraph, sourcenode: Node, sinknode: Node) -> None:
+        """ Add an edge subject to special-case filtering """
+
+        if re.match(r"^F[5X][ABCD]_SLICE$", sourcenode.name) and re.match(r"^F\d$", sinknode.name):
+            # Some of the -> Fn muxes use the same bits as the CCU2.INJECT enums.
+            # In CCU2 mode, these muxes should be fixed to Fn_SLICE -> Fn, and should
+            # not be set to F[5X] -> Fn no matter what the value of the mux bits are
+            # (since they represent CCU2_INJECT instead)
+            enum_name = f"SLICE{sourcenode.name[2]}.MODE"
+            for tiledata in tiles_by_loc[sourcenode.x, sinknode.y]:
+                if tiledata.tile.info.type.startswith("PLC2") and _get_enum_value(tiledata.cfg, enum_name) == "CCU2":
+                    # CCU2: correct F[5X]n_SLICE connection to Fn_SLICE -> Fn
+                    newsource = Ident.from_label(rgraph, sinknode.name + "_SLICE")
+                    sourcenode = Node(x=sourcenode.x, y=sourcenode.y, id=newsource)
+                    break
+        elif sourcenode.pin_name.startswith("RXDATA") and not _filter_data_pin(sourcenode):
+            # See comment in _filter_data_pin
+            return
+        elif sinknode.pin_name.startswith("TXDATA") and not _filter_data_pin(sinknode):
+            # See comment in _filter_data_pin
+            return
+
+        graph.add_edge(sourcenode, sinknode)
+
+    config_graph = ConnectionGraph()
+
+    for loc in tiles_by_loc:
+        rtile = rgraph.tiles[pytrellis.Location(loc[0], loc[1])]
+        for tiledata in tiles_by_loc[loc]:
+            tile = tiledata.tile
+            for arc in tiledata.cfg.carcs:
+                rarc = rtile.arcs[rgraph.ident(f"{arc.source}->{arc.sink}")]
+                sourcenode = wire_to_node(rarc.source)
+                sinknode = wire_to_node(rarc.sink)
+                add_edge(config_graph, sourcenode, sinknode)
+
+    # Expand configuration arcs to include BEL connections and zero-bit arcs
+    arc_graph = ConnectionGraph()
+    nodes_seen: Set[Node] = set()
+
+    def visit_node(node: Node, bel_func: Callable[[Node], None]) -> None:
+        """ Add unconfigurable or implicit arcs to the given node """
+        if node in nodes_seen:
+            return
+        nodes_seen.add(node)
+
+        try:
+            rtile = rgraph.tiles[node.loc]
+            rwire = rtile.wires[node.id.id]
+        except KeyError:
+            # there's a handful of troublesome cases which are outside of my control.
+            # Example: R0C31_G_ULDDRDEL does not exist; it's actually supposed to be the "fixed"
+            # connection G_ULDDRDEL=>DDRDEL but G_ULDDRDEL is not in the same tile.
+            print(f"Error: failed to find node {str(node)}", file=sys.stderr)
+            return
+
+        if node not in config_graph.edges_rev:
+            # Not configured - possible zero-bit configuration
+            for tiledata in tiles_by_loc[node.x, node.y]:
+                arcs = get_zero_bit_arcs(chip, tiledata.tile.info.type)
+                sources = arcs.get(node.id.label, [])
+                if not sources:
+                    continue
+                for source in sources:
+                    sourceid = Ident.from_label(rgraph, source)
+                    sourcenode = Node(x=node.x, y=node.y, id=sourceid)
+                    add_edge(arc_graph, sourcenode, node)
+                    visit_node(sourcenode, bel_func)
+
+        # Add fixed connections
+        for bel in rwire.belsUphill:
+            add_edge(arc_graph, bel_to_node(bel), node)
+            bel_func(wire_to_node(bel[0]))
+        for bel in rwire.belsDownhill:
+            add_edge(arc_graph, node, bel_to_node(bel))
+            bel_func(wire_to_node(bel[0]))
+        for routes in [rwire.uphill, rwire.downhill]:
+            for rarcrid in routes:
+                rarcname = rgraph.to_str(rarcrid.id)
+                if "=>" in rarcname:
+                    # => means a fixed (unconfigurable) connection
+                    rarc = rgraph.tiles[rarcrid.loc].arcs[rarcrid.id]
+                    sourcenode = wire_to_node(rarc.source)
+                    sinknode = wire_to_node(rarc.sink)
+                    add_edge(arc_graph, sourcenode, sinknode)
+                    visit_node(sourcenode, bel_func)
+                    visit_node(sinknode, bel_func)
+
+        # Add global (clock) connections - Project Trellis omits a lot of these :(
+        if node.name.startswith("G_HPBX"):
+            # TAP_DRIVE -> PLB tile
+            tap = chip.global_data.get_tap_driver(node.y, node.x)
+            if tap.dir == pytrellis.TapDir.LEFT:
+                tap_name = node.name.replace("G_HPBX", "L_HPBX")
+            else:
+                tap_name = node.name.replace("G_HPBX", "R_HPBX")
+            tap_id = Ident.from_label(rgraph, tap_name)
+            tap_node = Node(x=tap.col, y=node.y, id=tap_id)
+            add_edge(arc_graph, tap_node, node)
+            visit_node(tap_node, bel_func)
+
+        elif node.name.startswith("G_VPTX"):
+            # Spine tile -> TAP_DRIVE
+            tap = chip.global_data.get_tap_driver(node.y, node.x)
+            if tap.col == node.x:
+                # Spine output
+                quadrant = chip.global_data.get_quadrant(node.y, node.x)
+                spine = chip.global_data.get_spine_driver(quadrant, node.x)
+                spine_node = Node(x=spine.second, y=spine.first, id=node.id)
+                add_edge(arc_graph, spine_node, node)
+                visit_node(spine_node, bel_func)
+
+        elif node.name.startswith("G_HPRX"):
+            # Center mux -> spine tile (qqPCLKn -> G_HPRXnn00)
+            quadrant = chip.global_data.get_quadrant(node.y, node.x)
+            assert node.name.endswith("00")
+            clkid = int(node.name[6:-2])
+            global_id = Ident.from_label(rgraph, f"G_{quadrant}PCLK{clkid}")
+            global_node = Node(x=0, y=0, id=global_id)
+            add_edge(arc_graph, global_node, node)
+            visit_node(global_node, bel_func)
+
+    # Visit every configured arc and record all BELs seen
+    bels_todo: Set[Node] = set()
+    for sourcenode, nodes in config_graph.edges_fwd.items():
+        for sinknode in nodes:
+            add_edge(arc_graph, sourcenode, sinknode)
+            visit_node(sourcenode, bels_todo.add)
+            visit_node(sinknode, bels_todo.add)
+
+    # Adding *every* fixed connection is too expensive.
+    # As a compromise, add any fixed connection connected
+    # to used BELs. Ignore BELs that don't have any configured
+    # arcs.
+    for node in bels_todo:
+        rtile = rgraph.tiles[node.loc]
+        for _, rwire in rtile.wires.items():
+            wireident = Ident.from_id(rgraph, rwire.id)
+            wirenode = Node(x=node.x, y=node.y, id=wireident)
+            for bel in rwire.belsUphill:
+                if bel[0].id == node.id.id:
+                    add_edge(arc_graph, bel_to_node(bel), wirenode)
+                    visit_node(wirenode, lambda node: None)
+            for bel in rwire.belsDownhill:
+                if bel[0].id == node.id.id:
+                    add_edge(arc_graph, wirenode, bel_to_node(bel))
+                    visit_node(wirenode, lambda node: None)
+
+    return arc_graph
+
+
+# Verilog generation
+def filter_node(node: Node) -> bool:
+    if node.pin is None:
+        # We assume that all *useful* wires go between BELs.
+        return False
+    if "_ECLKSYNC" in node.mod_name:
+        # ECLKSYNC BELs appear to basically coincide with ECLKBUF BELs, making them redundant
+        # for the purposes of Verilog generation.
+        return False
+    if node.pin_name.startswith("IOLDO") or node.pin_name.startswith("IOLTO"):
+        # IOLDO/IOLTO are for internal use:
+        # https://freenode.irclog.whitequark.org/~h~openfpga/2018-12-25#23748701;
+        # 07:55 <daveshah> kbeckmann: IOLDO and IOLTO are for internal use only
+        # 07:55 <daveshah> They are for the dedicated interconnect between IOLOGIC and PIO
+        # Since we don't currently implement I/O modules, these pins do not
+        # need to be exported to Verilog.
+        return False
+    if node.pin_name == "INDD":
+        # INDD is the input after the delay block. This is currently redundant because
+        # the input source (PIO$O) will be exposed as an independent input, so the module's
+        # caller can simply hard-code an appropriate delay to the module input.
+        # If the I/O modules are ever implemented, it will be necessary to disambiguate
+        # PIO$O from INDD for the IOLOGIC$DI input to avoid a multi-root situation.
+        return False
+    return True
+
+
+@dataclass
+class Module:
+    """ A class to encapsulate a synthesized BEL supported by simulation """
+
+    module_name: str
+    tiledata: TileData
+    pin_map: Dict[str, Node]
+
+    input_pins: ClassVar[List[str]] = []
+    output_pins: ClassVar[List[str]] = []
+
+    @classmethod
+    def create_from_node(cls, node: Node, tiles_by_loc: TilesByLoc) -> Optional["Module"]:
+        modcls: Type[Module]
+        if node.name.startswith("SLICE"):
+            modcls = SliceModule
+            tiletype = "PLC2"
+        elif node.name.startswith("EBR"):
+            modcls = EBRModule
+            tiletype = "MIB_EBR"
+        else:
+            return None
+
+        for tiledata in tiles_by_loc[node.x, node.y]:
+            if tiledata.tile.info.type.startswith(tiletype):
+                break
+        else:
+            raise Exception(f"Tile type {tiletype} not found for node {node}")
+
+        return modcls(node.name, tiledata, {})
+
+    @classmethod
+    def print_definition(cls) -> None:
+        """ Print the Verilog code for the module definition """
+        raise NotImplementedError()
+
+    def _print_parameters(self, param_renames: Dict[str, str]) -> None:
+        """ Print the BEL's enums and words as an instance parameter list """
+        strs: List[str] = []
+
+        # Dump enumerations in Verilog-compatible format
+        for e in self.tiledata.cfg.cenums:
+            bel, ename = e.name.split(".", 1)
+            ename = ename.replace(".", "_")
+            ename = param_renames.get(ename, ename)
+            if bel == self.module_name:
+                strs.append(f'  .{ename}("{e.value}")')
+        # Dump initialization words in Verilog format
+        for w in self.tiledata.cfg.cwords:
+            bel, ename = w.name.split(".", 1)
+            ename = ename.replace(".", "_")
+            ename = param_renames.get(ename, ename)
+            if bel == self.module_name:
+                value = [str(int(c)) for c in w.value]
+                valuestr = "".join(value[::-1])
+                strs.append(f"  .{ename}({len(value)}'b{valuestr})")
+
+        if strs:
+            print(",\n".join(strs))
+
+    def _print_pins(self) -> None:
+        """ Print the BEL's pin connections as an instance port list """
+        strs: List[str] = []
+
+        # Dump input/output pins (already referenced to root pins), inputs first
+        pin_map_pins = set(self.pin_map.keys())
+        all_input_pins = set(self.input_pins)
+        output_pins = natsorted(pin_map_pins - all_input_pins)
+        input_pins = natsorted(pin_map_pins & all_input_pins)
+        for pin in input_pins + output_pins:
+            strs.append(f"  .{pin}( {self.pin_map[pin]} )")
+
+        if strs:
+            print(",\n".join(strs))
+
+    def print_instance(self, instname: str) -> None:
+        """ Print the Verilog code for this specific module instance """
+        raise NotImplementedError()
+
+
+@dataclass
+class SliceModule(Module):
+    input_pins: ClassVar[List[str]] = [
+        "A0",
+        "B0",
+        "C0",
+        "D0",
+        "A1",
+        "B1",
+        "C1",
+        "D1",
+        "M0",
+        "M1",
+        "FCI",
+        "FXA",
+        "FXB",
+        "CLK",
+        "LSR",
+        "CE",
+        "DI0",
+        "DI1",
+        "WD0",
+        "WD1",
+        "WAD0",
+        "WAD1",
+        "WAD2",
+        "WAD3",
+        "WRE",
+        "WCK",
+    ]
+
+    output_pins: ClassVar[List[str]] = [
+        "F0",
+        "Q0",
+        "F1",
+        "Q1",
+        "FCO",
+        "OFX0",
+        "OFX1",
+        "WDO0",
+        "WDO1",
+        "WDO2",
+        "WDO3",
+        "WADO0",
+        "WADO1",
+        "WADO2",
+        "WADO3",
+    ]
+
+    @classmethod
+    def print_definition(cls) -> None:
+        """ Print the Verilog code for the module definition """
+        params = [
+            "MODE",
+            "GSR",
+            "SRMODE",
+            "CEMUX",
+            "CLKMUX",
+            "LSRMUX",
+            "LUT0_INITVAL",
+            "LUT1_INITVAL",
+            "REG0_SD",
+            "REG1_SD",
+            "REG0_REGSET",
+            "REG1_REGSET",
+            "REG0_LSRMODE",
+            "REG1_LSRMODE",
+            "CCU2_INJECT1_0",
+            "CCU2_INJECT1_1",
+            "WREMUX",
+            "WCKMUX",
+            "A0MUX",
+            "A1MUX",
+            "B0MUX",
+            "B1MUX",
+            "C0MUX",
+            "C1MUX",
+            "D0MUX",
+            "D1MUX",
+        ]
+
+        print(
+            f"""
+/* This module requires the cells_sim library from yosys/techlibs/ecp5/cells.sim.v
+   for the TRELLIS_SLICE definition. Include that cell library before including this
+   file. */
+module ECP5_SLICE(
+    input {", ".join(cls.input_pins)},
+    output {", ".join(cls.output_pins)}
+);
+
+    /* These defaults correspond to all-zero-bit enumeration values */
+    parameter MODE = "LOGIC";
+    parameter GSR = "ENABLED";
+    parameter SRMODE = "LSR_OVER_CE";
+    parameter [127:0] CEMUX = "CE";
+    parameter CLKMUX = "CLK";
+    parameter LSRMUX = "LSR";
+    parameter LUT0_INITVAL = 16'hFFFF;
+    parameter LUT1_INITVAL = 16'hFFFF;
+    parameter REG0_SD = "1";
+    parameter REG1_SD = "1";
+    parameter REG0_REGSET = "SET";
+    parameter REG1_REGSET = "SET";
+    parameter REG0_LSRMODE = "LSR";
+    parameter REG1_LSRMODE = "LSR";
+    parameter [127:0] CCU2_INJECT1_0 = "YES";
+    parameter [127:0] CCU2_INJECT1_1 = "YES";
+    parameter WREMUX = "WRE";
+    parameter WCKMUX = "WCK";
+
+    parameter A0MUX = "A0";
+    parameter A1MUX = "A1";
+    parameter B0MUX = "B0";
+    parameter B1MUX = "B1";
+    parameter C0MUX = "C0";
+    parameter C1MUX = "C1";
+    parameter D0MUX = "D0";
+    parameter D1MUX = "D1";
+
+    TRELLIS_SLICE #(
+        {", ".join(f".{param}({param})" for param in params)}
+    ) impl (
+        {", ".join(f".{pin}({pin})" for pin in cls.input_pins)},
+        {", ".join(f".{pin}({pin})" for pin in cls.output_pins)}
+    );
+endmodule
+""".strip()
+        )
+
+    def print_instance(self, instname: str) -> None:
+        print("ECP5_SLICE #(")
+        self._print_parameters(
+            {
+                "K0_INIT": "LUT0_INITVAL",
+                "K1_INIT": "LUT1_INITVAL",
+            }
+        )
+        print(f") {instname} (")
+        self._print_pins()
+        print(");")
+        print()
+
+
+class EBRModule(Module):
+    input_pins: ClassVar[List[str]] = [
+        # Byte Enable wires
+        "ADA0",
+        "ADA1",
+        "ADA2",
+        "ADA3",
+        # ADW
+        "ADA5",
+        "ADA6",
+        "ADA7",
+        "ADA8",
+        "ADA9",
+        "ADA10",
+        "ADA11",
+        "ADA12",
+        "ADA13",
+        # ADR
+        "ADB5",
+        "ADB6",
+        "ADB7",
+        "ADB8",
+        "ADB9",
+        "ADB10",
+        "ADB11",
+        "ADB12",
+        "ADB13",
+        "CEB",  # CER
+        "CLKA",  # CLKW
+        "CLKB",  # CLKR
+        # DI
+        "DIA0",
+        "DIA1",
+        "DIA2",
+        "DIA3",
+        "DIA4",
+        "DIA5",
+        "DIA6",
+        "DIA7",
+        "DIA8",
+        "DIA9",
+        "DIA10",
+        "DIA11",
+        "DIA12",
+        "DIA13",
+        "DIA14",
+        "DIA15",
+        "DIA16",
+        "DIA17",
+        "DIB0",
+        "DIB1",
+        "DIB2",
+        "DIB3",
+        "DIB4",
+        "DIB5",
+        "DIB6",
+        "DIB7",
+        "DIB8",
+        "DIB9",
+        "DIB10",
+        "DIB11",
+        "DIB12",
+        "DIB13",
+        "DIB14",
+        "DIB15",
+        "DIB16",
+        "DIB17",
+    ]
+
+    output_pins: ClassVar[List[str]] = [
+        # DO
+        "DOA0",
+        "DOA1",
+        "DOA2",
+        "DOA3",
+        "DOA4",
+        "DOA5",
+        "DOA6",
+        "DOA7",
+        "DOA8",
+        "DOA9",
+        "DOA10",
+        "DOA11",
+        "DOA12",
+        "DOA13",
+        "DOA14",
+        "DOA15",
+        "DOA16",
+        "DOA17",
+        "DOB0",
+        "DOB1",
+        "DOB2",
+        "DOB3",
+        "DOB4",
+        "DOB5",
+        "DOB6",
+        "DOB7",
+        "DOB8",
+        "DOB9",
+        "DOB10",
+        "DOB11",
+        "DOB12",
+        "DOB13",
+        "DOB14",
+        "DOB15",
+        "DOB16",
+        "DOB17",
+    ]
+
+    @classmethod
+    def print_definition(cls) -> None:
+        """ Print the Verilog code for the module definition """
+        print(
+            f"""
+module ECP5_EBR(
+    input {", ".join(cls.input_pins)},
+    output {", ".join(cls.output_pins)}
+);
+
+    /* These defaults correspond to all-zero-bit enumeration values */
+    parameter CSDECODE_A = 3'b111;
+    parameter CSDECODE_B = 3'b111;
+    parameter ADA0MUX = "ADA0";
+    parameter ADA2MUX = "ADA2";
+    parameter ADA3MUX = "ADA3";
+    parameter ADB0MUX = "ADB0";
+    parameter ADB1MUX = "ADB1";
+    parameter CEAMUX = "CEA";
+    parameter CEBMUX = "CEB";
+    parameter CLKAMUX = "CLKA";
+    parameter CLKBMUX = "CLKB";
+    parameter DP16KD_DATA_WIDTH_A = "18";
+    parameter DP16KD_DATA_WIDTH_B = "18";
+    parameter DP16KD_WRITEMODE_A = "NORMAL";
+    parameter DP16KD_WRITEMODE_B = "NORMAL";
+    parameter MODE = "NONE";
+    parameter OCEAMUX = "OCEA";
+    parameter OCEBMUX = "OCEB";
+    parameter PDPW16KD_DATA_WIDTH_R = "18";
+    parameter PDPW16KD_RESETMODE = "SYNC";
+    parameter WEAMUX = "WEA";
+    parameter WEBMUX = "WEB";
+
+    /* TODO! */
+
+endmodule
+""".strip()
+        )
+
+    def print_instance(self, instname: str) -> None:
+        print("ECP5_EBR #(")
+        self._print_parameters({})
+        print(f") {instname} (")
+        self._print_pins()
+        print(");")
+        print()
+
+
+def print_verilog(graph: ConnectionGraph, tiles_by_loc: TilesByLoc, top_name: str) -> None:
+    # Extract connected components and their roots & leaves
+    sorted_components: List[Tuple[Component, List[Node], List[Node]]] = []
+    for component in graph.get_components():
+        roots = sorted([node for node in component.get_roots() if filter_node(node)])
+        if not roots:
+            continue
+        leaves = sorted([node for node in component.get_leaves() if filter_node(node)])
+        if not leaves:
+            continue
+        sorted_components.append((component, roots, leaves))
+    sorted_components = sorted(sorted_components, key=lambda x: x[1][0])
+
+    # Verilog input, output, and external wires
+    mod_sources: Set[Node] = set()
+    mod_sinks: Dict[Node, Node] = {}
+    mod_globals: Set[Node] = set()
+
+    modules: Dict[str, Module] = {}
+
+    print("/* Automatically generated by ecp_vlog.py")
+    for component, roots, leaves in sorted_components:
+        if len(roots) > 1:
+            print()
+            print("Unhandled multi-root component:")
+            print(*roots, sep=", ")
+            print(" -> ", end="")
+            print(*leaves, sep=", ")
+            continue
+
+        mod_sources.add(roots[0])
+        for node in leaves:
+            mod_sinks[node] = roots[0]
+        for node in roots + leaves:
+            if node.mod_name in modules:
+                modules[node.mod_name].pin_map[node.pin_name] = roots[0]
+                continue
+
+            mod_def = Module.create_from_node(node, tiles_by_loc)
+            if not mod_def:
+                mod_globals.add(node)
+                continue
+            mod_def.pin_map[node.pin_name] = roots[0]
+            modules[node.mod_name] = mod_def
+
+    # filter out any globals that are just copies of inputs or other outputs
+    for node in mod_globals:
+        if node in mod_sinks and mod_sinks[node] in mod_globals:
+            print(f"filtered out passed-through output: {mod_sinks[node]} -> {node}")
+            del mod_sinks[node]
+    all_sources: Set[Node] = set()
+    for sink in mod_sinks:
+        all_sources.add(mod_sinks[sink])
+    for node in mod_globals:
+        if node in mod_sources and node not in all_sources:
+            print(f"filtered out unused input: {node}")
+            mod_sources.discard(node)
+    print("*/")
+
+    for mod_type in set(type(mod_def) for mod_def in modules.values()):
+        mod_type.print_definition()
+
+    print(f"module {top_name}(")
+    mod_globals_vars = ["  input wire " + str(node) for node in mod_sources & mod_globals]
+    mod_globals_vars += ["  output wire " + str(node) for node in set(mod_sinks) & mod_globals]
+    print(" ,\n".join(natsorted(mod_globals_vars)))
+    print(");")
+    print()
+
+    # sources are either connected to global inputs
+    # or are outputs from some other node
+    for node in natsorted(mod_sources - mod_globals, key=str):
+        print(f"wire {node} ;")
+    print()
+
+    # sinks are either fed directly into a BEL,
+    # in which case they are directly substituted,
+    # or they are global outputs
+    for node in natsorted(set(mod_sinks) & mod_globals, key=str):
+        print(f"assign {node} = {mod_sinks[node]} ;")
+    print()
+
+    for modname in natsorted(modules):
+        modules[modname].print_instance(modname)
+
+    # debugging: print out any enums or words that we didn't handle in a Module
+    print("/* Unhandled enums/words:")
+    seen_enums: Set[Tuple[pytrellis.TileConfig, int]] = set()
+    seen_words: Set[Tuple[pytrellis.TileConfig, int]] = set()
+    for module in modules.values():
+        for i, e in enumerate(module.tiledata.cfg.cenums):
+            bel, _ = e.name.split(".", 1)
+            if bel == module.module_name:
+                seen_enums.add((module.tiledata.cfg, i))
+        for i, w in enumerate(module.tiledata.cfg.cwords):
+            bel, _ = w.name.split(".", 1)
+            if bel == module.module_name:
+                seen_words.add((module.tiledata.cfg, i))
+    for loc in sorted(tiles_by_loc.keys(), key=lambda loc: (loc[1], loc[0])):
+        for tiledata in tiles_by_loc[loc]:
+            for i, e in enumerate(tiledata.cfg.cenums):
+                if (tiledata.cfg, i) not in seen_enums:
+                    print(" ", tiledata.tile.info.name, "enum:", e.name, e.value)
+            for i, w in enumerate(tiledata.cfg.cwords):
+                if (tiledata.cfg, i) not in seen_words:
+                    valuestr = "".join([str(int(c)) for c in w.value][::-1])
+                    print(" ", tiledata.tile.info.name, "word:", w.name, valuestr)
+    print("*/")
+    print("endmodule")
+
+
def parse_lpf(filename: str) -> Dict[str, str]:
    """Extract pin-placement constraints from a Lattice LPF file.

    Reads the file, strips both ``#`` and ``//`` comments, joins the
    remaining text and splits it into ``;``-terminated commands.  Every
    well-formed ``LOCATE COMP "<comp>" SITE "<site>"`` command is recorded;
    malformed LOCATE commands are reported on stderr and skipped.

    :param filename: path to the .lpf file
    :return: dict mapping site name (package pin) -> component/signal name
    """
    import shlex

    lines = []
    with open(filename, "r") as f:
        for row in f:
            # Drop comments (LPF accepts both styles) and blank lines.
            row = row.split("#", 1)[0].split("//", 1)[0].strip()
            if row:
                lines.append(row)

    sites: Dict[str, str] = {}

    # Commands may span multiple lines, so rejoin everything and
    # re-split on the ';' command terminator.
    commands = " ".join(lines).split(";")
    for cmd in commands:
        cmd = cmd.strip()
        if not cmd:
            continue

        # shlex honours the double quotes around component/site names.
        words = shlex.split(cmd)
        if words[0] == "LOCATE":
            if len(words) != 5 or words[1] != "COMP" or words[3] != "SITE":
                print("ignoring malformed LOCATE in LPF:", cmd, file=sys.stderr)
                # Bug fix: previously fell through and indexed words[4],
                # raising IndexError (or storing garbage) on malformed input.
                continue
            sites[words[4]] = words[2]

    return sites
+
+
def main(argv: List[str]) -> None:
    """Command-line entry point: turn a .bit bitstream into Verilog on stdout.

    Progress messages are written to stderr so that stdout remains a clean
    .v file suitable for redirection.

    :param argv: command-line arguments (without the program name)
    """
    import argparse
    import json

    parser = argparse.ArgumentParser("Convert a .bit file into a .v verilog file for simulation")

    parser.add_argument("bitfile", help="Input .bit file")
    parser.add_argument("--package", help="Physical package (e.g. CABGA256), for renaming I/O ports")
    parser.add_argument("--lpf", help="Use LOCATE COMP commands from this LPF file to name I/O ports")
    parser.add_argument("-n", "--module-name", help="Name for the top-level module (default: top)", default="top")
    args = parser.parse_args(argv)

    # LPF pin names can only be applied once we know the physical package.
    if args.lpf and not args.package:
        parser.error("Cannot use a LPF file without specifying the chip package")

    pytrellis.load_database(database.get_db_root())

    print("Loading bitstream...", file=sys.stderr)
    bitstream = pytrellis.Bitstream.read_bit(args.bitfile)
    chip = bitstream.deserialise_chip()

    if args.package:
        iodb_path = os.path.join(database.get_db_subdir(chip.info.family, chip.info.name), "iodb.json")
        with open(iodb_path, "r") as f:
            iodb = json.load(f)

        lpf_names = parse_lpf(args.lpf) if args.lpf else {}

        # Rename PIO and IOLOGIC BELs based on their connected pins, for readability
        renames = {}
        for pin_name, pin_data in iodb["packages"][args.package].items():
            if pin_name in lpf_names:
                # escape LPF name in case it has funny characters
                pin_name = "\\" + lpf_names[pin_name]
            # PIO and IOLOGIC do not share pin names except for IOLDO/IOLTO
            renames["R{row}C{col}_PIO{pio}".format(**pin_data)] = pin_name
            renames["R{row}C{col}_IOLOGIC{pio}".format(**pin_data)] = pin_name

        # Note: the mod_name_map only affects str(node), not node.mod_name
        Node.mod_name_map = renames

    print("Computing routing graph...", file=sys.stderr)
    rgraph = chip.get_routing_graph()

    print("Computing connection graph...", file=sys.stderr)
    tiles_by_loc = make_tiles_by_loc(chip)
    graph = gen_config_graph(chip, rgraph, tiles_by_loc)

    print("Generating Verilog...", file=sys.stderr)
    print_verilog(graph, tiles_by_loc, args.module_name)

    print("Done!", file=sys.stderr)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])