diff --git a/icebox/icebox.py b/icebox/icebox.py
index a1c0c096ef..9e185e9eb6 100644
--- a/icebox/icebox.py
+++ b/icebox/icebox.py
@@ -622,7 +622,7 @@ def rlookup_funcnet(self, x, y, netname):
         return funcnets
 
     def ultraplus_follow_corner(self, corner, direction, netname):
-        m = re_match_cached("span4_(horz|vert)_([lrtb])_(\d+)$", netname)
+        m = re_match_cached(r"span4_(horz|vert)_([lrtb])_(\d+)$", netname)
         if not m:
             return None
         cur_edge = m.group(2)
@@ -744,8 +744,8 @@ def follow_net(self, netspec):
                 s = self.ultraplus_follow_corner(self.get_corner(s[0], s[1]), direction, n)
                 if s is None:
                     continue
-            elif re_match_cached("span4_(vert|horz)_[lrtb]_\d+$", n) and not self.is_ultra():
-                m = re_match_cached("span4_(vert|horz)_([lrtb])_\d+$", n)
+            elif re_match_cached(r"span4_(vert|horz)_[lrtb]_\d+$", n) and not self.is_ultra():
+                m = re_match_cached(r"span4_(vert|horz)_([lrtb])_\d+$", n)
                 vert_net = n.replace("_l_", "_t_").replace("_r_", "_b_").replace("_horz_", "_vert_")
                 horz_net = n.replace("_t_", "_l_").replace("_b_", "_r_").replace("_vert_", "_horz_")
 
@@ -1097,7 +1097,7 @@ def match(self, pattern):
 valid_sp12_v_b = set(range(24))
 
 def sp4h_normalize(netname, edge=""):
-    m = re_match_cached("sp4_h_([lr])_(\d+)$", netname)
+    m = re_match_cached(r"sp4_h_([lr])_(\d+)$", netname)
     assert m
     if not m: return None
     cur_edge = m.group(1)
@@ -1120,7 +1120,7 @@ def sp4h_normalize(netname, edge=""):
 # "Normalization" of span4 (not just sp4) is needed during Ultra/UltraPlus
 # corner tracing
 def ultra_span4_horz_normalize(netname, edge=""):
-    m = re_match_cached("span4_horz_([rl])_(\d+)$", netname)
+    m = re_match_cached(r"span4_horz_([rl])_(\d+)$", netname)
     assert m
     if not m: return None
     cur_edge = m.group(1)
@@ -1146,7 +1146,7 @@ def ultra_span4_horz_normalize(netname, edge=""):
     assert False
 
 def sp4v_normalize(netname, edge=""):
-    m = re_match_cached("sp4_v_([bt])_(\d+)$", netname)
+    m = re_match_cached(r"sp4_v_([bt])_(\d+)$", netname)
     assert m
     if not m: return None
     cur_edge = m.group(1)
@@ -1168,7 +1168,7 @@ def sp4v_normalize(netname, edge=""):
     return netname
 
 def sp12h_normalize(netname, edge=""):
-    m = re_match_cached("sp12_h_([lr])_(\d+)$", netname)
+    m = re_match_cached(r"sp12_h_([lr])_(\d+)$", netname)
     assert m
     if not m: return None
     cur_edge = m.group(1)
@@ -1190,7 +1190,7 @@ def sp12h_normalize(netname, edge=""):
     return netname
 
 def sp12v_normalize(netname, edge=""):
-    m = re_match_cached("sp12_v_([bt])_(\d+)$", netname)
+    m = re_match_cached(r"sp12_v_([bt])_(\d+)$", netname)
     assert m
     if not m: return None
     cur_edge = m.group(1)
@@ -1253,104 +1253,104 @@ def pos_has_net(pos, netname):
 
 def pos_follow_net(pos, direction, netname, is_ultra):
     if pos == "x" or ((pos in ("l", "r")) and is_ultra):
-        m = re_match_cached("sp4_h_[lr]_(\d+)$", netname)
+        m = re_match_cached(r"sp4_h_[lr]_(\d+)$", netname)
         if m and direction in ("l", "L"):
             n = sp4h_normalize(netname, "l")
             if n is not None:
                 if direction == "l" or is_ultra:
-                    n = re_sub_cached("_l_", "_r_", n)
+                    n = re_sub_cached(r"_l_", "_r_", n)
                     n = sp4h_normalize(n)
                 else:
-                    n = re_sub_cached("_l_", "_", n)
-                    n = re_sub_cached("sp4_h_", "span4_horz_", n)
+                    n = re_sub_cached(r"_l_", "_", n)
+                    n = re_sub_cached(r"sp4_h_", "span4_horz_", n)
                 return n
         if m and direction in ("r", "R"):
             n = sp4h_normalize(netname, "r")
             if n is not None:
                 if direction == "r" or is_ultra:
-                    n = re_sub_cached("_r_", "_l_", n)
+                    n = re_sub_cached(r"_r_", "_l_", n)
                     n = sp4h_normalize(n)
                 else:
-                    n = re_sub_cached("_r_", "_", n)
-                    n = re_sub_cached("sp4_h_", "span4_horz_", n)
+                    n = re_sub_cached(r"_r_", "_", n)
+                    n = re_sub_cached(r"sp4_h_", "span4_horz_", n)
                 return n
 
-        m = re_match_cached("sp4_v_[tb]_(\d+)$", netname)
+        m = re_match_cached(r"sp4_v_[tb]_(\d+)$", netname)
         if m and direction in ("t", "T"):
             n = sp4v_normalize(netname, "t")
             if n is not None:
                 if is_ultra and direction == "T" and pos in ("l", "r"):
-                    return re_sub_cached("sp4_v_", "span4_vert_", n)
+                    return re_sub_cached(r"sp4_v_", "span4_vert_", n)
                 elif direction == "t":
-                    n = re_sub_cached("_t_", "_b_", n)
+                    n = re_sub_cached(r"_t_", "_b_", n)
                     n = sp4v_normalize(n)
                 else:
-                    n = re_sub_cached("_t_", "_", n)
-                    n = re_sub_cached("sp4_v_", "span4_vert_", n)
+                    n = re_sub_cached(r"_t_", "_", n)
+                    n = re_sub_cached(r"sp4_v_", "span4_vert_", n)
                 return n
         if m and direction in ("b", "B"):
             n = sp4v_normalize(netname, "b")
             if n is not None:
                 if is_ultra and direction == "B" and pos in ("l", "r"):
-                    return re_sub_cached("sp4_v_", "span4_vert_", n)
+                    return re_sub_cached(r"sp4_v_", "span4_vert_", n)
                 elif direction == "b":
-                    n = re_sub_cached("_b_", "_t_", n)
+                    n = re_sub_cached(r"_b_", "_t_", n)
                     n = sp4v_normalize(n)
                 else:
-                    n = re_sub_cached("_b_", "_", n)
-                    n = re_sub_cached("sp4_v_", "span4_vert_", n)
+                    n = re_sub_cached(r"_b_", "_", n)
+                    n = re_sub_cached(r"sp4_v_", "span4_vert_", n)
                 return n
 
-        m = re_match_cached("sp12_h_[lr]_(\d+)$", netname)
+        m = re_match_cached(r"sp12_h_[lr]_(\d+)$", netname)
         if m and direction in ("l", "L"):
             n = sp12h_normalize(netname, "l")
             if n is not None:
                 if direction == "l" or is_ultra:
-                    n = re_sub_cached("_l_", "_r_", n)
+                    n = re_sub_cached(r"_l_", "_r_", n)
                     n = sp12h_normalize(n)
                 else:
-                    n = re_sub_cached("_l_", "_", n)
-                    n = re_sub_cached("sp12_h_", "span12_horz_", n)
+                    n = re_sub_cached(r"_l_", "_", n)
+                    n = re_sub_cached(r"sp12_h_", "span12_horz_", n)
                 return n
         if m and direction in ("r", "R"):
             n = sp12h_normalize(netname, "r")
             if n is not None:
                 if direction == "r" or is_ultra:
-                    n = re_sub_cached("_r_", "_l_", n)
+                    n = re_sub_cached(r"_r_", "_l_", n)
                     n = sp12h_normalize(n)
                 else:
-                    n = re_sub_cached("_r_", "_", n)
-                    n = re_sub_cached("sp12_h_", "span12_horz_", n)
+                    n = re_sub_cached(r"_r_", "_", n)
+                    n = re_sub_cached(r"sp12_h_", "span12_horz_", n)
                 return n
 
-        m = re_match_cached("sp12_v_[tb]_(\d+)$", netname)
+        m = re_match_cached(r"sp12_v_[tb]_(\d+)$", netname)
         if m and direction in ("t", "T"):
             n = sp12v_normalize(netname, "t")
             if n is not None:
                 if direction == "t":
-                    n = re_sub_cached("_t_", "_b_", n)
+                    n = re_sub_cached(r"_t_", "_b_", n)
                     n = sp12v_normalize(n)
                 elif direction == "T" and pos in ("l", "r"):
                     pass
                 else:
-                    n = re_sub_cached("_t_", "_", n)
-                    n = re_sub_cached("sp12_v_", "span12_vert_", n)
+                    n = re_sub_cached(r"_t_", "_", n)
+                    n = re_sub_cached(r"sp12_v_", "span12_vert_", n)
                 return n
         if m and direction in ("b", "B"):
             n = sp12v_normalize(netname, "b")
             if n is not None:
                 if direction == "b":
-                    n = re_sub_cached("_b_", "_t_", n)
+                    n = re_sub_cached(r"_b_", "_t_", n)
                     n = sp12v_normalize(n)
                 elif direction == "B" and pos in ("l", "r"):
                     pass
                 else:
-                    n = re_sub_cached("_b_", "_", n)
-                    n = re_sub_cached("sp12_v_", "span12_vert_", n)
+                    n = re_sub_cached(r"_b_", "_", n)
+                    n = re_sub_cached(r"sp12_v_", "span12_vert_", n)
                 return n
 
     if (pos in ("l", "r" )) and (not is_ultra):
-        m = re_match_cached("span4_vert_([bt])_(\d+)$", netname)
+        m = re_match_cached(r"span4_vert_([bt])_(\d+)$", netname)
         if m:
             case, idx = direction + m.group(1), int(m.group(2))
             if case == "tt":
@@ -1363,7 +1363,7 @@ def pos_follow_net(pos, direction, netname, is_ultra):
                 return "span4_vert_t_%d" % idx
 
     if pos in ("t", "b" ):
-        m = re_match_cached("span4_horz_([rl])_(\d+)$", netname)
+        m = re_match_cached(r"span4_horz_([rl])_(\d+)$", netname)
         if m:
             case, idx = direction + m.group(1), int(m.group(2))
             if direction == "L":
@@ -1380,27 +1380,27 @@ def pos_follow_net(pos, direction, netname, is_ultra):
                 return "span4_horz_l_%d" % idx
 
     if pos == "l" and direction == "r" and (not is_ultra):
-        m = re_match_cached("span4_horz_(\d+)$", netname)
+        m = re_match_cached(r"span4_horz_(\d+)$", netname)
         if m: return sp4h_normalize("sp4_h_l_%s" % m.group(1))
-        m = re_match_cached("span12_horz_(\d+)$", netname)
+        m = re_match_cached(r"span12_horz_(\d+)$", netname)
         if m: return sp12h_normalize("sp12_h_l_%s" % m.group(1))
 
     if pos == "r" and direction == "l" and (not is_ultra):
-        m = re_match_cached("span4_horz_(\d+)$", netname)
+        m = re_match_cached(r"span4_horz_(\d+)$", netname)
         if m: return sp4h_normalize("sp4_h_r_%s" % m.group(1))
-        m = re_match_cached("span12_horz_(\d+)$", netname)
+        m = re_match_cached(r"span12_horz_(\d+)$", netname)
         if m: return sp12h_normalize("sp12_h_r_%s" % m.group(1))
 
     if pos == "t" and direction == "b":
-        m = re_match_cached("span4_vert_(\d+)$", netname)
+        m = re_match_cached(r"span4_vert_(\d+)$", netname)
         if m: return sp4v_normalize("sp4_v_t_%s" % m.group(1))
-        m = re_match_cached("span12_vert_(\d+)$", netname)
+        m = re_match_cached(r"span12_vert_(\d+)$", netname)
         if m: return sp12v_normalize("sp12_v_t_%s" % m.group(1))
 
     if pos == "b" and direction == "t":
-        m = re_match_cached("span4_vert_(\d+)$", netname)
+        m = re_match_cached(r"span4_vert_(\d+)$", netname)
         if m: return sp4v_normalize("sp4_v_b_%s" % m.group(1))
-        m = re_match_cached("span12_vert_(\d+)$", netname)
+        m = re_match_cached(r"span12_vert_(\d+)$", netname)
         if m: return sp12v_normalize("sp12_v_b_%s" % m.group(1))
 
     return None
diff --git a/icebox/icebox_colbuf.py b/icebox/icebox_colbuf.py
index 3003a90e9a..4153041b7d 100755
--- a/icebox/icebox_colbuf.py
+++ b/icebox/icebox_colbuf.py
@@ -71,7 +71,7 @@ def make_cache(stmt, raw_db):
             if bit.startswith("!"):
                 value = "0"
                 bit = bit[1:]
-            match = re_match_cached("B([0-9]+)\[([0-9]+)\]", bit)
+            match = re_match_cached(r"B([0-9]+)\[([0-9]+)\]", bit)
             cache_entry[1].append((int(match.group(1)), int(match.group(2)), value))
         cache.append(cache_entry)
     return cache
@@ -121,7 +121,7 @@ def set_colbuf(ic, tile, bit, value):
     tile_db = ic.tile_db(tile[0], tile[1])
     for entry in tile_db:
         if entry[1] == "ColBufCtrl" and entry[2] == "glb_netwk_%d" % bit:
-            match = re_match_cached("B([0-9]+)\[([0-9]+)\]", entry[0][0])
+            match = re_match_cached(r"B([0-9]+)\[([0-9]+)\]", entry[0][0])
             l = tile_dat[int(match.group(1))]
             n = int(match.group(2))
             l = l[:n] + value + l[n+1:]
diff --git a/icebox/icebox_hlc2asc.py b/icebox/icebox_hlc2asc.py
index 59d2f696b3..61bf52f2a2 100755
--- a/icebox/icebox_hlc2asc.py
+++ b/icebox/icebox_hlc2asc.py
@@ -509,7 +509,7 @@ def parse_verilog_bitvector_to_bits(in_str):
     #replace x with 0
     in_str = re_sub_cached('[xX]', '0', in_str)
 
-    m = re_match_cached("([0-9]+)'([hdob])([0-9a-fA-F]+)", in_str)
+    m = re_match_cached(r"([0-9]+)'([hdob])([0-9a-fA-F]+)", in_str)
     if m:
         num_bits = int(m.group(1))
         prefix = m.group(2)
@@ -879,7 +879,7 @@ def read(self, fields):
         if fields[0] == 'lut' and len(fields) == 2:
             self.lut_bits = fields[1]
         elif fields[0] == 'out' and len(fields) >= 3 and fields[1] == '=':
-            m = re_match_cached("([0-9]+)'b([01]+)", fields[2])
+            m = re_match_cached(r"([0-9]+)'b([01]+)", fields[2])
             if m:
                 lut_bits = parse_verilog_bitvector_to_bits(fields[2])
                 # Verilog 16'bXXXX is MSB first but the bitstream wants LSB.
diff --git a/icebox/icebox_maps.py b/icebox/icebox_maps.py
index 062335d124..098b7ee0b0 100755
--- a/icebox/icebox_maps.py
+++ b/icebox/icebox_maps.py
@@ -60,7 +60,7 @@ def get_bit_group(x, y, db):
             funcs.add("r")
         elif entry[1] == "buffer":
             funcs.add("b")
-        elif re_match_cached("LC_", entry[1]):
+        elif re_match_cached(r"LC_", entry[1]):
             funcs.add("l")
         elif entry[1] == "NegClk":
             funcs.add("N")
diff --git a/icebox/icebox_stat.py b/icebox/icebox_stat.py
index 5752267812..f13dd2f7d0 100755
--- a/icebox/icebox_stat.py
+++ b/icebox/icebox_stat.py
@@ -73,23 +73,23 @@ def usage():
 for segs in connections:
     for seg in segs:
         if ic.tile_type(seg[0], seg[1]) == "IO" and seg[2].startswith("io_"):
-            match = re_match_cached("io_(\d+)/D_(IN|OUT)_(\d+)", seg[2])
+            match = re_match_cached(r"io_(\d+)/D_(IN|OUT)_(\d+)", seg[2])
             if match:
                 loc = (seg[0], seg[1], int(match.group(1)))
                 io_locations.add(loc)
 
         if ic.tile_type(seg[0], seg[1]) == "LOGIC" and seg[2].startswith("lutff_"):
-            match = re_match_cached("lutff_(\d)/in_\d", seg[2])
+            match = re_match_cached(r"lutff_(\d)/in_\d", seg[2])
             if match:
                 loc = (seg[0], seg[1], int(match.group(1)))
                 lut_locations.add(loc)
 
-            match = re_match_cached("lutff_(\d)/cout", seg[2])
+            match = re_match_cached(r"lutff_(\d)/cout", seg[2])
             if match:
                 loc = (seg[0], seg[1], int(match.group(1)))
                 carry_locations.add(loc)
 
-            match = re_match_cached("lutff_(\d)/out", seg[2])
+            match = re_match_cached(r"lutff_(\d)/out", seg[2])
             if match:
                 loc = (seg[0], seg[1], int(match.group(1)))
                 seq_bits = icebox.get_lutff_seq_bits(ic.tile(loc[0], loc[1]), loc[2])
@@ -101,7 +101,7 @@ def usage():
                 bram_locations.add(loc)
 
         if seg[2].startswith("glb_netwk_"):
-            match = re_match_cached("glb_netwk_(\d)", seg[2])
+            match = re_match_cached(r"glb_netwk_(\d)", seg[2])
             if match:
                 global_nets.add(int(match.group(1)))
 
diff --git a/icebox/icebox_vlog.py b/icebox/icebox_vlog.py
index 74ac3d3dac..b61e9ed958 100755
--- a/icebox/icebox_vlog.py
+++ b/icebox/icebox_vlog.py
@@ -94,7 +94,7 @@ def usage():
     elif o in ("-p", "-P"):
         with open(a, "r") as f:
             for line in f:
-                if o == "-P" and not re_search_cached(" # ICE_(GB_)?IO", line):
+                if o == "-P" and not re_search_cached(r" # ICE_(GB_)?IO", line):
                     continue
                 line = re_sub_cached(r"#.*", "", line.strip()).split()
                 if "--warn-no-port" in line:
@@ -235,8 +235,8 @@ def get_pll_bits(pllinfo, name, n):
             iocells_negclk.add((idx[0], idx[1], 0))
             iocells_negclk.add((idx[0], idx[1], 1))
         if entry[1].startswith("IOB_") and entry[2].startswith("PINTYPE_") and tc.match(entry[0]):
-            match1 = re_match_cached("IOB_(\d+)", entry[1])
-            match2 = re_match_cached("PINTYPE_(\d+)", entry[2])
+            match1 = re_match_cached(r"IOB_(\d+)", entry[1])
+            match2 = re_match_cached(r"PINTYPE_(\d+)", entry[2])
             assert match1 and match2
             iocells_type[(idx[0], idx[1], int(match1.group(1)))][int(match2.group(1))] = "1"
     iocells_type[(idx[0], idx[1], 0)] = "".join(iocells_type[(idx[0], idx[1], 0)])
@@ -245,7 +245,7 @@ def get_pll_bits(pllinfo, name, n):
 for segs in sorted(ic.group_segments()):
     for seg in segs:
         if ic.tile_type(seg[0], seg[1]) == "IO":
-            match = re_match_cached("io_(\d+)/D_(IN|OUT)_(\d+)", seg[2])
+            match = re_match_cached(r"io_(\d+)/D_(IN|OUT)_(\d+)", seg[2])
             if match:
                 cell = (seg[0], seg[1], int(match.group(1)))
                 if cell in iocells_skip:
@@ -288,7 +288,7 @@ def next_netname():
     renamed_net_to_port = False
 
     for s in segs:
-        match = re_match_cached("io_(\d+)/PAD", s[2])
+        match = re_match_cached(r"io_(\d+)/PAD", s[2])
         if match:
             idx = (s[0], s[1], int(match.group(1)))
             p = "io_%d_%d_%d" % idx
@@ -323,7 +323,7 @@ def next_netname():
                 text_ports.append("inout %s" % p)
                 text_wires.append("assign %s = %s;" % (p, n))
 
-        match = re_match_cached("lutff_(\d+)/", s[2])
+        match = re_match_cached(r"lutff_(\d+)/", s[2])
         if match:
             #IpCon and DSP tiles look like logic tiles, but aren't.
             if ic.device in ["5k", "u4k"] and (s[0] == 0 or s[0] == ic.max_x):
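Why the raw-string prefix: adding r to these pattern literals does not change the text handed to re_match_cached, re_sub_cached, or re_search_cached, because sequences like \d are not recognized string escapes and Python keeps the backslash either way; it only avoids the "invalid escape sequence" DeprecationWarning (SyntaxWarning on newer CPython releases) emitted when the plain literals are compiled. A minimal sketch of that equivalence, using the standard re module rather than icebox's cached wrappers (not part of the patch):

import re

# Minimal sketch (plain `re` stands in for icebox's re_match_cached wrapper).
# "\d" is not a recognized string escape, so the plain literal keeps its
# backslash and compares equal to the raw-string form; newer CPython releases
# merely warn while compiling the former.
plain = "span4_(horz|vert)_([lrtb])_(\d+)$"    # triggers the escape-sequence warning
raw   = r"span4_(horz|vert)_([lrtb])_(\d+)$"   # raw string, as used throughout the patch
assert plain == raw                            # identical pattern text at runtime
assert re.match(raw, "span4_horz_l_3").group(3) == "3"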