flags.add(flag)
record["flags"] = PPCRecord.Flags(flags)
- return dataclass(cls, record, keymap=PPCRecord.__KEYMAP,
-     typemap=typemap)
+ return dataclass(cls, record,
+     keymap=PPCRecord.__KEYMAP,
+     typemap=typemap)
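
(Aside: dataclass() here is the project's own record-construction helper, not the standard-library decorator. The sketch below only illustrates the general keymap/typemap idea the call suggests, i.e. renaming CSV columns and coercing their values before building the record; build(), Row and the sample record are hypothetical stand-ins, not the helper's actual implementation.)

    import dataclasses

    @dataclasses.dataclass(frozen=True)
    class Row:
        name: str
        flags: int

    def build(cls, record, keymap, typemap):
        # hypothetical stand-in: rename columns via keymap, coerce via typemap
        kwargs = {}
        for (key, value) in record.items():
            key = keymap.get(key, key)
            kwargs[key] = typemap.get(key, str)(value)
        return cls(**kwargs)

    print(build(Row, {"insn": "addi", "flags": "3"},
        keymap={"insn": "name"}, typemap={"flags": int}))
    # Row(name='addi', flags=3)
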
@cached_property
def names(self):
value |= (vstate << bit)
mask |= (mstate << bit)
- return _dataclasses.replace(lhs,
-     opcode=Opcode(value=value, mask=mask))
+ opcode = Opcode(value=value, mask=mask)
+
+ return _dataclasses.replace(lhs, opcode=opcode)
return _functools.reduce(merge, self)
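
(merge() ORs the value and mask of successive opcodes and rebuilds the record with _dataclasses.replace; _functools.reduce then folds it across the whole collection. A minimal, self-contained run of the same pattern, using made-up Opcode/Entry dataclasses rather than the project's classes:)

    import dataclasses
    import functools

    @dataclasses.dataclass(frozen=True)
    class Opcode:
        value: int
        mask: int

    @dataclasses.dataclass(frozen=True)
    class Entry:
        name: str
        opcode: Opcode

    def merge(lhs, rhs):
        # OR the opcode fields pairwise, keep everything else from lhs
        value = lhs.opcode.value | rhs.opcode.value
        mask = lhs.opcode.mask | rhs.opcode.mask
        return dataclasses.replace(lhs, opcode=Opcode(value=value, mask=mask))

    entries = (
        Entry("a", Opcode(value=0b0001, mask=0b0011)),
        Entry("b", Opcode(value=0b0100, mask=0b1100)),
    )
    print(functools.reduce(merge, entries))
    # Entry(name='a', opcode=Opcode(value=5, mask=15))
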
if value == "0":
    record[key] = "NONE"
- record["extra"] = cls.ExtraMap(record.pop(f"{index}") \
-     for index in range(0, 4))
+ extra = []
+ for idx in range(0, 4):
+     extra.append(record.pop(f"{idx}"))
+
+ record["extra"] = cls.ExtraMap(extra)
return dataclass(cls, record, keymap=cls.__KEYMAP)
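
(The explicit loop keeps the four EXTRA columns in index order and pops them out of the record, like the generator expression it replaces, but it is easier to step through. A stand-alone check with an invented sample record and a plain tuple in place of cls.ExtraMap:)

    record = {"0": "d:RT", "1": "d:RA", "2": "0", "3": "0", "unit": "ALU"}

    extra = []
    for idx in range(0, 4):
        # pop() both collects the column and removes it from the record,
        # so only the remaining keys reach the record constructor
        extra.append(record.pop(f"{idx}"))

    print(tuple(extra))   # ('d:RT', 'd:RA', '0', '0')
    print(record)         # {'unit': 'ALU'}
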
def parse(stream, factory):
+ def match(entry):
+ return ("TODO" not in frozenset(entry.values()))
+
lines = filter(lambda line: not line.strip().startswith("#"), stream)
entries = _csv.DictReader(lines)
- entries = filter(lambda entry: "TODO" \
-     not in frozenset(entry.values()), entries)
+ entries = filter(match, entries)
return tuple(map(factory, entries))
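
(parse() drops "#" comment lines, feeds the rest to _csv.DictReader, filters out any row still carrying a TODO value, and maps the factory over what is left. A self-contained run of the same pipeline over an in-memory CSV; the sample rows are invented and dict stands in for the factory:)

    import csv

    lines = (
        "# opcode table",
        "opcode,comment",
        "0b0001,addi",
        "0b0010,TODO",
    )

    def match(entry):
        return ("TODO" not in frozenset(entry.values()))

    rows = filter(lambda line: not line.strip().startswith("#"), lines)
    entries = filter(match, csv.DictReader(rows))
    print(tuple(map(dict, entries)))
    # ({'opcode': '0b0001', 'comment': 'addi'},)
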
    section.Mode.INTEGER: IntegerOpcode,
    section.Mode.PATTERN: PatternOpcode,
}[section.mode]
- factory = _functools.partial(PPCRecord.CSV,
-     opcode_cls=opcode_cls)
+ factory = _functools.partial(
+     PPCRecord.CSV, opcode_cls=opcode_cls)
with open(path, "r", encoding="UTF-8") as stream:
    for insn in parse(stream, factory):
        records[section][insn.comment].add(insn)
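
(The factory is _functools.partial binding opcode_cls once, so parse() can keep calling factory(entry) with a single argument. A quick illustration of that dispatch-then-bind pattern with throwaway names:)

    import functools

    def make_record(entry, opcode_cls):
        # opcode_cls was bound by partial(); parse() only supplies the entry
        return (entry["comment"], opcode_cls(entry["opcode"]))

    # stand-in for the Mode -> opcode class dispatch above
    opcode_cls = {"integer": lambda text: int(text, 0)}["integer"]
    factory = functools.partial(make_record, opcode_cls=opcode_cls)
    print(factory({"comment": "addi", "opcode": "0b0001"}))
    # ('addi', 1)
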