Merge pull request #1879 from antmicro/fix-race-condition
Fix race condition
diff --git a/.github/workflows/scripts/db.sh b/.github/workflows/scripts/db.sh
index 1b69807..82ad251 100755
--- a/.github/workflows/scripts/db.sh
+++ b/.github/workflows/scripts/db.sh
@@ -73,7 +73,7 @@
# Looking for the failing directories and packing them
# example of line from which the failing fuzzer directory gets extracted:
# - Makefile:87: recipe for target '000-db-init/000-init-db/run.xc7a100tfgg676-1.ok' failed --> fuzzers/000-db-init
- grep -Po "recipe for target '\K(.*)(?=\/run\..*\.ok')" $tmp | sed -e 's/^/fuzzers\//' | xargs tar -zcf fuzzers/fails.tgz
+ grep -Po "recipe for target '\K(.*)(?=\/run.*\.ok')" $tmp | sed -e 's/^/fuzzers\//' | xargs tar -zcf fuzzers/fails.tgz
echo "----------------------------------------"
echo "A failure occurred during Database build."
echo "----------------------------------------"
diff --git a/prjxray/bitfilter.py b/prjxray/bitfilter.py
index 66a1c98..f7184bd 100644
--- a/prjxray/bitfilter.py
+++ b/prjxray/bitfilter.py
@@ -8,6 +8,9 @@
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
+
+from prjxray.util import OpenSafeFile
+
class Bitfilter(object):
def __init__(
self, frames_to_include=None, frames_to_exclude=[],
diff --git a/prjxray/bitstream.py b/prjxray/bitstream.py
index 83b264d..b120b5e 100644
--- a/prjxray/bitstream.py
+++ b/prjxray/bitstream.py
@@ -10,7 +10,7 @@
# SPDX-License-Identifier: ISC
import json
import os
-from prjxray import util
+from prjxray.util import block_type_s2i
# Break frames into WORD_SIZE bit words.
WORD_SIZE_BITS = 32
@@ -119,7 +119,7 @@
"""Convert a deconstructed address to a 32 bit word"""
# https://www.xilinx.com/support/documentation/user_guides/ug470_7Series_Config.pdf
ret = 0
- ret |= util.block_type_s2i[block_type] << 23
+ ret |= block_type_s2i[block_type] << 23
ret |= {"top": 0, "bottom": 1}[top_bottom] << 22
ret |= cfg_row << 17
ret |= cfg_col << 7
diff --git a/prjxray/lib.py b/prjxray/lib.py
index 27dd236..20f1aa7 100644
--- a/prjxray/lib.py
+++ b/prjxray/lib.py
@@ -14,6 +14,8 @@
import re
from collections import namedtuple
+from prjxray.util import OpenSafeFile
+
def read_root_csv(root_dir):
""" Reads root.csv from raw db directory.
@@ -24,7 +26,7 @@
tiles = {}
nodes = []
- with open(os.path.join(root_dir, 'root.csv')) as f:
+ with OpenSafeFile(os.path.join(root_dir, 'root.csv')) as f:
for d in csv.DictReader(f):
if d['filetype'] == 'tile':
if d['subtype'] not in tiles:
@@ -123,17 +125,17 @@
import pyjson5 as json5
import progressbar
for node in progressbar.progressbar(nodes):
- with open(node) as f:
+ with OpenSafeFile(node) as f:
node_wires = json5.load(f)
assert node_wires['node'] not in self.nodes
self.nodes[node_wires['node']] = node_wires['wires']
def load_from_file(self, fname):
- with open(fname, 'rb') as f:
+ with OpenSafeFile(fname, 'rb') as f:
self.nodes = pickle.load(f)
def save_to_file(self, fname):
- with open(fname, 'wb') as f:
+ with OpenSafeFile(fname, 'wb') as f:
pickle.dump(self.nodes, f)
def site_pin_node_to_wires(self, tile, node):
diff --git a/prjxray/lms_solver.py b/prjxray/lms_solver.py
index d0fbb62..e68f566 100755
--- a/prjxray/lms_solver.py
+++ b/prjxray/lms_solver.py
@@ -54,6 +54,8 @@
import numpy as np
import numpy.linalg as linalg
+from prjxray.util import OpenSafeFile
+
# =============================================================================
@@ -83,7 +85,7 @@
segdata = None
all_segdata = []
- with open(file_name, "r") as fp:
+ with OpenSafeFile(file_name, "r") as fp:
for line in fp.readlines():
line = line.strip()
@@ -174,7 +176,7 @@
lines.append(all_tags[r] + " " + " ".join(bits) + "\n")
- with open(file_name, "w") as fp:
+ with OpenSafeFile(file_name, "w") as fp:
for line in lines:
fp.write(line)
@@ -702,7 +704,7 @@
address_map = {}
# Load tilegrid
- with open(tilegrid_file, "r") as fp:
+ with OpenSafeFile(tilegrid_file, "r") as fp:
tilegrid = json.load(fp)
# Loop over tiles
@@ -982,7 +984,7 @@
# Dump to CSV
if args.x is not None:
- with open(args.x, "w") as fp:
+ with OpenSafeFile(args.x, "w") as fp:
dump_solution_to_csv(fp, tags_to_solve, bits_to_solve, X)
# Dump results
diff --git a/prjxray/node_lookup.py b/prjxray/node_lookup.py
index 8b684f1..a6278ef 100644
--- a/prjxray/node_lookup.py
+++ b/prjxray/node_lookup.py
@@ -13,6 +13,7 @@
import pyjson5 as json5
import os.path
+from prjxray.util import OpenSafeFile
def create_tables(conn):
c = conn.cursor()
@@ -63,7 +64,7 @@
nodes_processed = set()
for node in progressbar.progressbar(nodes):
- with open(node) as f:
+ with OpenSafeFile(node) as f:
node_wires = json5.load(f)
assert node_wires['node'] not in nodes_processed
nodes_processed.add(node_wires['node'])
diff --git a/prjxray/segmaker.py b/prjxray/segmaker.py
index 4d6795f..542fcc8 100644
--- a/prjxray/segmaker.py
+++ b/prjxray/segmaker.py
@@ -24,7 +24,7 @@
'''
import os, json, re
-from prjxray import util
+from prjxray.util import OpenSafeFile, get_db_root, get_fabric
BLOCK_TYPES = set(('CLB_IO_CLK', 'BLOCK_RAM', 'CFG_CLB'))
@@ -85,12 +85,12 @@
def __init__(self, bitsfile, verbose=None, db_root=None, fabric=None):
self.db_root = db_root
if self.db_root is None:
- self.db_root = util.get_db_root()
+ self.db_root = get_db_root()
assert self.db_root, "No db root specified."
self.fabric = fabric
if self.fabric is None:
- self.fabric = util.get_fabric()
+ self.fabric = get_fabric()
assert self.fabric, "No fabric specified."
self.verbose = verbose if verbose is not None else os.getenv(
@@ -129,7 +129,7 @@
def load_grid(self):
'''Load self.grid holding tile addresses'''
- with open(os.path.join(self.db_root, self.fabric, "tilegrid.json"),
+ with OpenSafeFile(os.path.join(self.db_root, self.fabric, "tilegrid.json"),
"r") as f:
self.grid = json.load(f)
assert "segments" not in self.grid, "Old format tilegrid.json"
@@ -152,7 +152,7 @@
'''
self.bits = dict()
print("Loading bits from %s." % bitsfile)
- with open(bitsfile, "r") as f:
+ with OpenSafeFile(bitsfile, "r") as f:
for line in f:
# ex: bit_00020500_000_17
line = line.split("_")
@@ -446,7 +446,7 @@
segments = self.segments_by_type[segtype]
if segments:
print("Writing %s." % filename)
- with open(filename, "w") as f:
+ with OpenSafeFile(filename, "w") as f:
for segname, segdata in sorted(segments.items()):
# seg 00020300_010
print("seg %s" % segname, file=f)
diff --git a/prjxray/tile.py b/prjxray/tile.py
index cb008a4..68da011 100644
--- a/prjxray/tile.py
+++ b/prjxray/tile.py
@@ -13,6 +13,7 @@
import json
from prjxray import lib
from prjxray.timing import fast_slow_tuple_to_corners, RcElement
+from prjxray.util import OpenSafeFile
TileDbs = namedtuple(
'TileDbs', 'segbits block_ram_segbits ppips mask tile_type')
@@ -313,7 +314,7 @@
backward_timing=get_pip_timing(pip.get('dst_to_src')),
)
- with open(self.tile_dbs.tile_type) as f:
+ with OpenSafeFile(self.tile_dbs.tile_type) as f:
tile_type = json.load(f)
assert self.tilename_upper == tile_type['tile_type']
self.wires = get_wires(tile_type['wires'])
diff --git a/prjxray/tile_segbits.py b/prjxray/tile_segbits.py
index 8fd6b15..5564a4b 100644
--- a/prjxray/tile_segbits.py
+++ b/prjxray/tile_segbits.py
@@ -10,8 +10,8 @@
# SPDX-License-Identifier: ISC
from collections import namedtuple
from prjxray import bitstream
-from prjxray import util
from prjxray.grid_types import BlockType
+from prjxray.util import OpenSafeFile
import enum
@@ -84,22 +84,16 @@
self.feature_addresses = {}
if tile_db.ppips is not None:
- with open(tile_db.ppips) as f:
- util.lock_file(f, 10)
+ with OpenSafeFile(tile_db.ppips) as f:
self.ppips = read_ppips(f)
- util.unlock_file(f)
if tile_db.segbits is not None:
- with open(tile_db.segbits) as f:
- util.lock_file(f, 10)
+ with OpenSafeFile(tile_db.segbits) as f:
self.segbits[BlockType.CLB_IO_CLK] = read_segbits(f)
- util.unlock_file(f)
if tile_db.block_ram_segbits is not None:
- with open(tile_db.block_ram_segbits) as f:
- util.lock_file(f, 10)
+ with OpenSafeFile(tile_db.block_ram_segbits) as f:
self.segbits[BlockType.BLOCK_RAM] = read_segbits(f)
- util.unlock_file(f)
for block_type in self.segbits:
for feature in self.segbits[block_type]:
diff --git a/prjxray/tile_segbits_alias.py b/prjxray/tile_segbits_alias.py
index 1e8a379..2eb536e 100644
--- a/prjxray/tile_segbits_alias.py
+++ b/prjxray/tile_segbits_alias.py
@@ -21,6 +21,7 @@
from prjxray import bitstream
from prjxray.grid_types import Bits
from prjxray.tile_segbits import read_ppips
+from prjxray.util import OpenSafeFile
class TileSegbitsAlias(object):
@@ -67,7 +68,7 @@
self.ppips = {}
if tile_db.ppips is not None:
- with open(tile_db.ppips) as f:
+ with OpenSafeFile(tile_db.ppips) as f:
self.ppips = read_ppips(f)
self.tile_segbits = db.get_tile_segbits(self.alias_tile_type)
diff --git a/prjxray/util.py b/prjxray/util.py
index 5ce4550..bb2efb0 100644
--- a/prjxray/util.py
+++ b/prjxray/util.py
@@ -18,6 +18,49 @@
from .roi import Roi
+def timeout_handler(signum, frame):
+ raise Exception("ERROR TIMEOUT: could not lock file")
+
+
+class OpenSafeFile:
+ """
+    Opens a file and takes an advisory exclusive lock on it (flock),
+    allowing safe reads and writes to a file that can potentially be
+    modified by multiple processes at the same time.
+ """
+
+ def __init__(self, name, mode="r", timeout=10):
+ self.name = name
+ self.mode = mode
+ self.timeout = timeout
+
+ self.fd = None
+
+ def __enter__(self):
+ self.fd = open(self.name, self.mode)
+ self.lock_file()
+ return self.fd
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.unlock_file()
+ self.fd.close()
+
+ def lock_file(self):
+ assert self.fd is not None
+ try:
+ signal.signal(signal.SIGALRM, timeout_handler)
+ signal.alarm(self.timeout)
+ fcntl.flock(self.fd.fileno(), fcntl.LOCK_EX)
+ signal.alarm(0)
+ except Exception as e:
+ print(f"{e}: {self.name}")
+ exit(1)
+
+ def unlock_file(self):
+ assert self.fd is not None
+ fcntl.flock(self.fd.fileno(), fcntl.LOCK_UN)
+
+
def get_db_root():
# Used during tilegrid db bootstrap
ret = os.getenv("XRAY_DATABASE_ROOT", None)
@@ -44,7 +87,7 @@
filename = os.path.join(db_root, "mapping", "parts.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
- with open(filename, 'r') as stream:
+ with OpenSafeFile(filename, 'r') as stream:
part_mapping = yaml.load(stream, Loader=yaml.FullLoader)
part = part_mapping.get(part, None)
assert part, "Part {} not found in {}".format(part, part_mapping)
@@ -53,7 +96,7 @@
def set_part_information(db_root, information):
filename = os.path.join(db_root, "mapping", "parts.yaml")
- with open(filename, 'w+') as stream:
+ with OpenSafeFile(filename, 'w+') as stream:
yaml.dump(information, stream)
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
@@ -63,7 +106,7 @@
filename = os.path.join(file_path, "resources.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
- with open(filename, 'r') as stream:
+ with OpenSafeFile(filename, 'r') as stream:
res_mapping = yaml.load(stream, Loader=yaml.FullLoader)
res = res_mapping.get(part, None)
assert res, "Part {} not found in {}".format(part, part_mapping)
@@ -72,7 +115,7 @@
def set_part_resources(file_path, information):
filename = os.path.join(file_path, "resources.yaml")
- with open(filename, 'w+') as stream:
+ with OpenSafeFile(filename, 'w+') as stream:
yaml.dump(information, stream)
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
@@ -83,7 +126,7 @@
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
part = get_part_information(db_root, part)
- with open(filename, 'r') as stream:
+ with OpenSafeFile(filename, 'r') as stream:
device_mapping = yaml.load(stream, Loader=yaml.FullLoader)
device = device_mapping.get(part['device'], None)
assert device, "Device {} not found in {}".format(
@@ -95,7 +138,7 @@
filename = os.path.join(db_root, "mapping", "devices.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
- with open(filename, 'r') as stream:
+ with OpenSafeFile(filename, 'r') as stream:
device_mapping = yaml.load(stream, Loader=yaml.FullLoader)
return device_mapping
@@ -104,7 +147,7 @@
filename = os.path.join(db_root, "mapping", "parts.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
- with open(filename, 'r') as stream:
+ with OpenSafeFile(filename, 'r') as stream:
part_mapping = yaml.load(stream, Loader=yaml.FullLoader)
return part_mapping
@@ -255,11 +298,9 @@
def parse_db_lines(fn):
- with open(fn, "r") as f:
- lock_file(f, 10)
+ with OpenSafeFile(fn, "r") as f:
for line in f:
yield line, parse_db_line(line)
- unlock_file(f)
def write_db_lines(fn, entries, track_origin=False):
@@ -272,11 +313,9 @@
new_line = " ".join([tag] + sorted(bits))
new_lines.append(new_line)
- with open(fn, "w") as f:
- lock_file(f, 10)
+ with OpenSafeFile(fn, "w") as f:
for line in sorted(new_lines):
print(line, file=f)
- unlock_file(f)
def parse_tagbit(x):
@@ -409,21 +448,3 @@
parser.add_argument(
'--no-' + dashed, dest=dest, action='store_false', **kwargs)
-
-def timeout_handler(signum, frame):
- raise Exception("ERROR: could not lock file!")
-
-
-def lock_file(fd, timeout):
- try:
- signal.signal(signal.SIGALRM, timeout_handler)
- signal.alarm(timeout)
- fcntl.flock(fd.fileno(), fcntl.LOCK_EX)
- signal.alarm(0)
- except Exception as e:
- print(e)
- exit(1)
-
-
-def unlock_file(fd):
- fcntl.flock(fd.fileno(), fcntl.LOCK_UN)
diff --git a/utils/bit2fasm.py b/utils/bit2fasm.py
index d36a0ba..103b377 100755
--- a/utils/bit2fasm.py
+++ b/utils/bit2fasm.py
@@ -16,9 +16,10 @@
import os
import fasm
import fasm.output
-from prjxray.db import Database
from prjxray import fasm_disassembler
from prjxray import bitstream
+from prjxray.db import Database
+from prjxray.util import OpenSafeFile
import subprocess
import tempfile
@@ -41,7 +42,7 @@
grid = db.grid()
disassembler = fasm_disassembler.FasmDisassembler(db)
- with open(bits_file) as f:
+ with OpenSafeFile(bits_file) as f:
bitdata = bitstream.load_bitdata(f)
model = fasm.output.merge_and_sort(
diff --git a/utils/create_timing_worksheet_db.py b/utils/create_timing_worksheet_db.py
index e6dd4cf..54ca7c7 100644
--- a/utils/create_timing_worksheet_db.py
+++ b/utils/create_timing_worksheet_db.py
@@ -21,6 +21,7 @@
PassTransistor, IntristicDelay, RcElement, PvtCorner
from prjxray.math_models import ExcelMathModel
from prjxray.db import Database
+from prjxray.util import OpenSafeFile
from prjxray import util
FAST = PvtCorner.FAST
@@ -481,7 +482,7 @@
def build_wire_filter(wire_filter):
wires_to_include = set()
- with open(wire_filter) as f:
+ with OpenSafeFile(wire_filter) as f:
for l in f:
wire = l.strip()
if not wire:
@@ -514,7 +515,7 @@
args = parser.parse_args()
- with open(args.timing_json) as f:
+ with OpenSafeFile(args.timing_json) as f:
timing = json.load(f)
db = Database(args.db_root, args.part)
diff --git a/utils/dbfixup.py b/utils/dbfixup.py
index 0e3310d..8005e51 100755
--- a/utils/dbfixup.py
+++ b/utils/dbfixup.py
@@ -218,7 +218,7 @@
lines = []
llast = None
- with open(fn_in, "r") as f:
+ with util.OpenSafeFile(fn_in, "r") as f:
for line in f:
# Hack: skip duplicate lines
# This happens while merging a new multibit entry
@@ -327,7 +327,7 @@
mask_db_file = "%s/mask_%s.db" % (db_root, mask_db)
if os.path.exists(mask_db_file):
- with open(mask_db_file, "r") as f:
+ with util.OpenSafeFile(mask_db_file, "r") as f:
for line in f:
line = line.split()
assert len(line) == 2
@@ -340,7 +340,7 @@
if not os.path.exists(seg_db_file):
continue
- with open(seg_db_file, "r") as f:
+ with util.OpenSafeFile(seg_db_file, "r") as f:
for line in f:
line = line.split()
for bit in line[1:]:
@@ -353,7 +353,7 @@
bits.add(bit)
if len(bits) > 0:
- with open(mask_db_file, "w") as f:
+ with util.OpenSafeFile(mask_db_file, "w") as f:
for bit in sorted(bits):
print("bit %s" % bit, file=f)
@@ -361,14 +361,15 @@
def load_zero_db(fn):
# Remove comments and convert to list of lines
ret = []
- for l in open(fn, "r"):
- pos = l.find("#")
- if pos >= 0:
- l = l[0:pos]
- l = l.strip()
- if not l:
- continue
- ret.append(l)
+ with util.OpenSafeFile(fn, "r") as f:
+ for l in f:
+ pos = l.find("#")
+ if pos >= 0:
+ l = l[0:pos]
+ l = l.strip()
+ if not l:
+ continue
+ ret.append(l)
return ret
@@ -535,11 +536,9 @@
)
changes += new_changes
- with open(fn_out, "w") as f:
- util.lock_file(f, 10)
+ with util.OpenSafeFile(fn_out, "w") as f:
for line in sorted(lines):
print(line, file=f)
- util.unlock_file(f)
if changes is not None:
seg_files += 1
@@ -654,7 +653,7 @@
tag_groups = []
# Load tag group specifications
- with open(file_name, "r") as fp:
+ with util.OpenSafeFile(file_name, "r") as fp:
for line in fp:
line = line.strip()
diff --git a/utils/fasm2frames.py b/utils/fasm2frames.py
index fe384db..82ddc35 100755
--- a/utils/fasm2frames.py
+++ b/utils/fasm2frames.py
@@ -23,6 +23,7 @@
from prjxray import fasm_assembler, util
from prjxray.db import Database
from prjxray.roi import Roi
+from prjxray.util import OpenSafeFile
import sys
@@ -133,11 +134,11 @@
bank_to_tile = defaultdict(lambda: set())
if part is not None:
- with open(os.path.join(db_root, part, "package_pins.csv"), "r") as fp:
+ with OpenSafeFile(os.path.join(db_root, part, "package_pins.csv"), "r") as fp:
reader = csv.DictReader(fp)
package_pins = [l for l in reader]
- with open(os.path.join(db_root, part, "part.json"), "r") as fp:
+ with OpenSafeFile(os.path.join(db_root, part, "part.json"), "r") as fp:
part_data = json.load(fp)
for bank, loc in part_data["iobanks"].items():
@@ -167,7 +168,7 @@
extra_features = []
if roi:
- with open(roi) as f:
+ with OpenSafeFile(roi) as f:
roi_j = json.load(f)
x1 = roi_j['info']['GRID_X_MIN']
x2 = roi_j['info']['GRID_X_MAX']
diff --git a/utils/find_missing_segbits.py b/utils/find_missing_segbits.py
index 7f06227..a8085a3 100755
--- a/utils/find_missing_segbits.py
+++ b/utils/find_missing_segbits.py
@@ -24,6 +24,8 @@
import os
import re
+from prjxray.util import OpenSafeFile
+
# =============================================================================
@@ -33,7 +35,7 @@
PIP name strings. Names are formatted as <dst_wire>.<src_wire>
"""
- with open(tile_file, "r") as fp:
+ with OpenSafeFile(tile_file, "r") as fp:
root = json.load(fp)
pips = root["pips"]
@@ -53,7 +55,7 @@
"""
ppips = {}
- with open(ppips_file, "r") as fp:
+ with OpenSafeFile(ppips_file, "r") as fp:
for line in fp.readlines():
line = line.split()
if len(line) == 2:
@@ -70,7 +72,7 @@
"""
segbits = []
- with open(segbits_file, "r") as fp:
+ with OpenSafeFile(segbits_file, "r") as fp:
for line in fp.readlines():
line = line.split()
if len(line) > 1:
diff --git a/utils/group.py b/utils/group.py
index 34fa198..b8f59b5 100755
--- a/utils/group.py
+++ b/utils/group.py
@@ -21,6 +21,8 @@
import re
import itertools
+from prjxray.util import OpenSafeFile
+
# =============================================================================
@@ -34,7 +36,7 @@
tag_groups = []
# Load tag group specifications
- with open(file_name, "r") as fp:
+ with OpenSafeFile(file_name, "r") as fp:
for line in fp:
line = line.strip()
@@ -89,7 +91,7 @@
segbits = {}
- with open(file_name, "r") as fp:
+ with OpenSafeFile(file_name, "r") as fp:
for line in fp:
line = line.strip()
fields = line.split()
@@ -114,7 +116,7 @@
Save segbits to a .db or .rdb file
"""
- with open(file_name, "w") as fp:
+ with OpenSafeFile(file_name, "w") as fp:
for tag, bits in segbits.items():
if isinstance(bits, str):
diff --git a/utils/groupmask.py b/utils/groupmask.py
index 87da46c..504c86d 100644
--- a/utils/groupmask.py
+++ b/utils/groupmask.py
@@ -10,7 +10,7 @@
# SPDX-License-Identifier: ISC
import sys, os, re
-from prjxray import util
+from prjxray.util import OpenSafeFile, parse_db_lines, write_db_lines
def index_masks(fn_in, groups_in):
@@ -21,7 +21,7 @@
groups[group] = set()
# Index bits
- for line, (tag, bits, mode) in util.parse_db_lines(fn_in):
+ for line, (tag, bits, mode) in parse_db_lines(fn_in):
assert not mode, "Unresolved tag: %s" % (line, )
prefix = tag[0:tag.rfind(".")]
group = groups.get(prefix, None)
@@ -42,7 +42,7 @@
def apply_masks(fn_in, groups):
"""Add 0 entries ("!") to .db entries based on groups definition"""
new_db = {}
- for line, (tag, bits, mode) in util.parse_db_lines(fn_in):
+ for line, (tag, bits, mode) in parse_db_lines(fn_in):
assert not mode, "Unresolved tag: %s" % (line, )
prefix = tag[0:tag.rfind(".")]
group = groups.get(prefix, None)
@@ -58,8 +58,9 @@
def load_groups(fn):
ret = []
- for l in open(fn, "r"):
- ret.append(l.strip())
+ with OpenSafeFile(fn, "r") as f:
+ for l in f:
+ ret.append(l.strip())
return ret
@@ -67,7 +68,7 @@
groups_in = load_groups(groups_fn)
groups = index_masks(fn_in, groups_in)
new_db = apply_masks(fn_in, groups)
- util.write_db_lines(fn_out, new_db)
+ write_db_lines(fn_out, new_db)
def main():
diff --git a/utils/info_md.py b/utils/info_md.py
index 00638e8..d5e6411 100755
--- a/utils/info_md.py
+++ b/utils/info_md.py
@@ -15,6 +15,7 @@
import parse as format_parser
import subprocess
import sys
+
"""Module for generating the Info.md file found in the database directory."""
info_md_header = """
diff --git a/utils/make_ports.py b/utils/make_ports.py
index 27eecda..ffe7637 100644
--- a/utils/make_ports.py
+++ b/utils/make_ports.py
@@ -23,6 +23,8 @@
from collections import defaultdict
+from prjxray.util import OpenSafeFile
+
def main():
@@ -49,7 +51,7 @@
args = parser.parse_args()
# Load pin dump
- with open(args.csv, "r") as fp:
+ with OpenSafeFile(args.csv, "r") as fp:
pin_dump = list(csv.DictReader(fp))
# Group pins into ports
@@ -103,7 +105,7 @@
port["width"] += 1
# Write pin ports to a JSON file
- with open(args.json, "w") as fp:
+ with OpenSafeFile(args.json, "w") as fp:
json.dump(ports, fp, indent=1, sort_keys=True)
diff --git a/utils/makesdf.py b/utils/makesdf.py
index 6ba2180..bfb2e14 100644
--- a/utils/makesdf.py
+++ b/utils/makesdf.py
@@ -12,6 +12,7 @@
import json
import argparse
+from prjxray.util import OpenSafeFile
def get_elems_count(timings, slice, site, bel_type):
combinational = 0
@@ -113,7 +114,7 @@
"""
)"""
- with open(outdir + '/' + slice + '.sdf', "w") as fp:
+ with OpenSafeFile(outdir + '/' + slice + '.sdf', "w") as fp:
fp.write(sdf)
@@ -125,7 +126,7 @@
args = parser.parse_args()
- with open(args.json, 'r') as fp:
+ with OpenSafeFile(args.json, 'r') as fp:
timings = json.load(fp)
produce_sdf(timings, args.sdf)
diff --git a/utils/maskview.py b/utils/maskview.py
index 16cf5b3..663377a 100755
--- a/utils/maskview.py
+++ b/utils/maskview.py
@@ -24,6 +24,8 @@
import argparse
import re
+from prjxray.util import OpenSafeFile
+
# =============================================================================
@@ -32,7 +34,7 @@
Read bits from a .db or .rdb file. Ignores tags and bit values.
"""
- with open(file_name, "r") as fp:
+ with OpenSafeFile(file_name, "r") as fp:
lines = fp.readlines()
bits = set()
diff --git a/utils/parsedb.py b/utils/parsedb.py
index 157e416..72b8601 100755
--- a/utils/parsedb.py
+++ b/utils/parsedb.py
@@ -10,11 +10,12 @@
# SPDX-License-Identifier: ISC
import sys, re
-from prjxray import util
+from prjxray.util import OpenSafeFile, db_root_arg, parse_db_line
def run(fnin, fnout=None, strict=False, verbose=False):
- lines = open(fnin, 'r').read().split('\n')
+ with OpenSafeFile(fnin) as f:
+ lines = f.read().split('\n')
tags = dict()
bitss = dict()
for line in lines:
@@ -24,7 +25,7 @@
# TODO: figure out what to do with masks
if line.startswith("bit "):
continue
- tag, bits, mode, _ = util.parse_db_line(line)
+ tag, bits, mode, _ = parse_db_line(line)
if strict:
if mode != "always":
assert not mode, "strict: got ill defined line: %s" % (line, )
@@ -39,7 +40,7 @@
bitss[bits] = tag
if fnout:
- with open(fnout, "w") as fout:
+ with OpenSafeFile(fnout, "w") as fout:
for line in sorted(lines):
line = line.strip()
if line == '':
@@ -53,7 +54,7 @@
parser = argparse.ArgumentParser(
description="Parse a db file, checking for consistency")
- util.db_root_arg(parser)
+ db_root_arg(parser)
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument(
'--strict',
diff --git a/utils/sdfmerge.py b/utils/sdfmerge.py
index 75ea7b5..c38365d 100644
--- a/utils/sdfmerge.py
+++ b/utils/sdfmerge.py
@@ -11,7 +11,9 @@
import argparse
import json
+
from sdf_timing import sdfparse
+from prjxray.util import OpenSafeFile
def merge(timings_list, site):
@@ -59,15 +61,16 @@
timings_list = list()
for sdf in args.sdfs:
- with open(sdf, 'r') as fp:
+ with OpenSafeFile(sdf, 'r') as fp:
timing = sdfparse.parse(fp.read())
timings_list.append(timing)
merged_sdf = merge(timings_list, args.site)
- open(args.out, 'w').write(sdfparse.emit(merged_sdf, timescale='1ns'))
+ with OpenSafeFile(args.out, 'w') as fp:
+ fp.write(sdfparse.emit(merged_sdf, timescale='1ns'))
if args.json is not None:
- with open(args.json, 'w') as fp:
+ with OpenSafeFile(args.json, 'w') as fp:
json.dump(merged_sdf, fp, indent=4, sort_keys=True)
diff --git a/utils/segprint.py b/utils/segprint.py
index 87e8ce7..35fba63 100755
--- a/utils/segprint.py
+++ b/utils/segprint.py
@@ -19,7 +19,7 @@
import copy
from prjxray import bitstream
from prjxray import db as prjxraydb
-from prjxray import util
+from prjxray.util import OpenSafeFile, parse_tagbit, db_root_arg, part_arg
class NoDB(Exception):
@@ -39,7 +39,7 @@
verbose and print("process_db(%s): %s" % (tile_type, fns))
for fn in fns:
if fn:
- with open(fn, "r") as f:
+ with OpenSafeFile(fn, "r") as f:
for line in f:
process(line)
@@ -61,7 +61,7 @@
return
tagbits = []
else:
- tagbits = [util.parse_tagbit(x) for x in parts[1:]]
+ tagbits = [parse_tagbit(x) for x in parts[1:]]
tags.append(list([name] + tagbits))
@@ -430,7 +430,7 @@
def load_tiles(db_root, part):
# TODO: Migrate to new tilegrid format via library.
- with open("%s/%s/tilegrid.json" % (db_root, part), "r") as f:
+ with OpenSafeFile("%s/%s/tilegrid.json" % (db_root, part), "r") as f:
tiles = json.load(f)
return tiles
@@ -449,7 +449,8 @@
db = prjxraydb.Database(db_root, part)
tiles = load_tiles(db_root, part)
segments = mk_segments(tiles)
- bitdata = bitstream.load_bitdata2(open(bits_file, "r"))
+ with OpenSafeFile(bits_file) as f:
+ bitdata = bitstream.load_bitdata2(f)
if flag_unknown_bits:
print_unknown_bits(tiles, bitdata)
@@ -486,8 +487,8 @@
parser = argparse.ArgumentParser(
description="Decode bits within a tile's address space")
- util.db_root_arg(parser)
- util.part_arg(parser)
+ db_root_arg(parser)
+ part_arg(parser)
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument(
'-z',
diff --git a/utils/segview.py b/utils/segview.py
index 47ef88d..8031fdc 100755
--- a/utils/segview.py
+++ b/utils/segview.py
@@ -21,6 +21,8 @@
import itertools
+from prjxray.util import OpenSafeFile
+
# =============================================================================
@@ -69,7 +71,7 @@
# Load segbits
segbits = {}
- with open(file_name, "r") as fp:
+ with OpenSafeFile(file_name, "r") as fp:
lines = fp.readlines()
# Parse lines
diff --git a/utils/simpleroute.py b/utils/simpleroute.py
index 0ba969b..dfd10d1 100755
--- a/utils/simpleroute.py
+++ b/utils/simpleroute.py
@@ -12,6 +12,7 @@
import sys, os, json
import pickle
+from prjxray.util import OpenSafeFile
class MergeFind:
def __init__(self):
@@ -34,11 +35,11 @@
def db_gen():
print("Reading database..")
- with open("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
+ with OpenSafeFile("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tilegrid = json.load(f)
- with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
+ with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tileconn = json.load(f)
@@ -81,7 +82,7 @@
reverse_node_node = dict()
for tile_type in ["int_l", "int_r"]:
- with open("%s/%s/segbits_%s.db" %
+ with OpenSafeFile("%s/%s/segbits_%s.db" %
(os.getenv("XRAY_DATABASE_DIR"), os.getenv("XRAY_DATABASE"),
tile_type), "r") as f:
for line in f:
diff --git a/utils/sort_db.py b/utils/sort_db.py
index 2d09a86..b94110b 100755
--- a/utils/sort_db.py
+++ b/utils/sort_db.py
@@ -65,8 +65,8 @@
import json
import utils.xjson as xjson
import utils.cmp as cmp
-from prjxray.util import lock_file, unlock_file
+from prjxray.util import OpenSafeFile
def split_all(s, chars):
"""Split on multiple character values.
@@ -323,7 +323,8 @@
else:
return False
- lines = open(pathname).readlines()
+ with OpenSafeFile(pathname) as f:
+ lines = f.readlines()
tosort = []
for l in lines:
@@ -334,22 +335,10 @@
tosort.sort(key=cmp.cmp_key)
- # Make sure the sort is stable
- #for i in range(0, 4):
- # copy = tosort.copy()
- # random.shuffle(copy)
- # copy.sort(key=cmp.cmp_key)
- # assert len(copy) == len(tosort)
- # for i in range(0, len(copy)):
- # assert copy[i] == tosort[i], "\n%r\n != \n%r\n" % (
- # copy[i], tosort[i])
-
with open(pathname, 'w') as f:
- lock_file(f, 10)
for _, l in tosort:
f.write(l)
f.write('\n')
- unlock_file(f)
return True
@@ -387,13 +376,15 @@
def sort_json(filename):
"""Sort a XXX.json file."""
+
try:
- d = json.load(open(filename))
+ with OpenSafeFile(filename) as f:
+ d = json.load(f)
except json.JSONDecodeError as e:
print(e)
return False
- with open(filename, 'w') as f:
+ with OpenSafeFile(filename, 'w') as f:
xjson.pprint(f, d)
return True
@@ -401,17 +392,15 @@
def sort_db_text(n):
rows = []
- with open(n) as f:
+ with OpenSafeFile(n) as f:
for l in f:
rows.append(([extract_num(s) for s in l.split()], l))
rows.sort(key=lambda i: i[0])
- with open(n, 'w') as f:
- lock_file(f, 10)
+ with OpenSafeFile(n, 'w') as f:
for l in rows:
f.write(l[-1])
- unlock_file(f)
return True
diff --git a/utils/sp6_bitstream_analyzer.py b/utils/sp6_bitstream_analyzer.py
index f70b65d..40296f0 100755
--- a/utils/sp6_bitstream_analyzer.py
+++ b/utils/sp6_bitstream_analyzer.py
@@ -25,6 +25,8 @@
import argparse
from io import StringIO
+from prjxray.util import OpenSafeFile
+
conf_regs = {
0: "CRC",
1: "FAR_MAJ",
@@ -124,7 +126,7 @@
self.curr_fdri_write_len = 0
self.curr_crc_check = 0
self.fdri_in_progress = False
- with open(file_name, "rb") as f:
+ with OpenSafeFile(file_name, "rb") as f:
self.bytes = f.read()
pos, self.header = self.get_header()
self.body = [
@@ -395,7 +397,7 @@
else:
frame_stream.write(
"#{:3}:{:6},".format(i % 65, hex(self.frame_data[i])))
- with open(file_name, "w") as f:
+ with OpenSafeFile(file_name, "w") as f:
print(frame_stream.getvalue(), file=f)
def write_frames(self, file_name):
@@ -409,7 +411,7 @@
frame_stream.write("\n")
elif i < len(self.frame_data) - 1:
frame_stream.write(",")
- with open(file_name, "w") as f:
+ with OpenSafeFile(file_name, "w") as f:
print(frame_stream.getvalue(), file=f)
diff --git a/utils/test_fasm2frames.py b/utils/test_fasm2frames.py
index 133c171..ef957b3 100755
--- a/utils/test_fasm2frames.py
+++ b/utils/test_fasm2frames.py
@@ -19,6 +19,7 @@
import prjxray
import utils.fasm2frames as fasm2frames
+from prjxray.util import OpenSafeFile
from textx.exceptions import TextXSyntaxError
@@ -69,7 +70,7 @@
return os.path.join(os.path.dirname(__file__), 'test_data', fname)
def get_test_data(self, fname):
- with open(self.filename_test_data(fname)) as f:
+ with OpenSafeFile(self.filename_test_data(fname)) as f:
return f.read()
def fasm2frames(self, fin_data, **kw):
diff --git a/utils/tileconnloops.py b/utils/tileconnloops.py
index ab55b4a..4fffac9 100755
--- a/utils/tileconnloops.py
+++ b/utils/tileconnloops.py
@@ -14,13 +14,14 @@
import os, sys, json
+from prjxray.util import OpenSafeFile
def main():
- with open("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
+ with OpenSafeFile("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tilegrid = json.load(f)
- with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
+ with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tileconn = json.load(f)
diff --git a/utils/tileconnwire.py b/utils/tileconnwire.py
index 368d2df..21ef9f5 100755
--- a/utils/tileconnwire.py
+++ b/utils/tileconnwire.py
@@ -11,13 +11,14 @@
import os, sys, json
+from prjxray.util import OpenSafeFile
def main(argv):
if len(argv) != 3:
print("Usage example: python3 %s HCLK_R HCLK_SW6E3" % sys.argv[0])
sys.exit(1)
- with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
+ with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tileconn = json.load(f)
diff --git a/utils/update_resources.py b/utils/update_resources.py
index d5d3b26..bc5c8f5 100755
--- a/utils/update_resources.py
+++ b/utils/update_resources.py
@@ -15,7 +15,7 @@
import re
import tempfile
import json
-from prjxray import util
+from prjxray.util import OpenSafeFile, db_root_arg, get_parts, set_part_resources
def main():
@@ -31,7 +31,7 @@
'family',
help="Name of the device family.",
choices=['artix7', 'kintex7', 'zynq7', 'spartan7'])
- util.db_root_arg(parser)
+ db_root_arg(parser)
args = parser.parse_args()
env = os.environ.copy()
@@ -40,7 +40,7 @@
os.getenv('XRAY_DIR'), 'settings', args.family)
information = {}
- parts = util.get_parts(args.db_root)
+ parts = get_parts(args.db_root)
processed_parts = dict()
for part in parts.keys():
# Skip parts which differ only in the speedgrade, as they have the same pins
@@ -64,7 +64,7 @@
cwd=cwd,
stdout=subprocess.PIPE)
- with open(tmp_file, "r") as fp:
+ with OpenSafeFile(tmp_file, "r") as fp:
pins_json = json.load(fp)
os.remove(tmp_file)
@@ -81,7 +81,7 @@
processed_parts[common_part] = {'pins': pins}
# Overwrites the <family>/resources.yaml file completly with new data
- util.set_part_resources(resource_path, information)
+ set_part_resources(resource_path, information)
if __name__ == '__main__':
diff --git a/utils/verify_tile_connections.py b/utils/verify_tile_connections.py
index b9b7238..213bdcf 100755
--- a/utils/verify_tile_connections.py
+++ b/utils/verify_tile_connections.py
@@ -18,7 +18,7 @@
import pyjson5 as json5
import json
import sys
-from prjxray import util
+from prjxray.util import OpenSafeFile, db_root_arg, part_arg
def full_wire_name(wire_in_grid):
@@ -64,7 +64,7 @@
def read_json5(fname):
- with open(fname, 'r') as f:
+ with OpenSafeFile(fname, 'r') as f:
return json5.load(f)
@@ -72,8 +72,8 @@
parser = argparse.ArgumentParser(
description="Tests database against raw node list.")
- util.db_root_arg(parser)
- util.part_arg(parser)
+ db_root_arg(parser)
+ part_arg(parser)
parser.add_argument('--raw_node_root', required=True)
parser.add_argument('--error_nodes', default="error_nodes.json")
parser.add_argument('--ignored_wires')
@@ -109,7 +109,7 @@
if len(error_nodes) > 0:
if args.ignored_wires:
- with open(args.ignored_wires, 'r') as f:
+ with OpenSafeFile(args.ignored_wires, 'r') as f:
ignored_wires = [l.strip() for l in f.readlines()]
print(
@@ -119,7 +119,7 @@
args.error_nodes,
))
- with open(args.error_nodes, 'w') as f:
+ with OpenSafeFile(args.error_nodes, 'w') as f:
json.dump(error_nodes, f, indent=2)
if not args.ignored_wires:
diff --git a/utils/xjson.py b/utils/xjson.py
index d45cfa0..995824a 100755
--- a/utils/xjson.py
+++ b/utils/xjson.py
@@ -11,6 +11,7 @@
import sys
import json
from prjxray.xjson import pprint
+from prjxray.util import OpenSafeFile
if __name__ == "__main__":
if len(sys.argv) == 1:
@@ -18,5 +19,6 @@
doctest.testmod()
else:
assert len(sys.argv) == 2
- d = json.load(open(sys.argv[1]))
+ with OpenSafeFile(sys.argv[1]) as f:
+ d = json.load(f)
pprint(sys.stdout, d)