mirror of
https://github.com/zama-ai/tfhe-rs.git
synced 2026-01-06 21:34:05 -05:00
This backend abstracts communication with the HPU FPGA hardware.
It defines its own entities to prevent circular dependencies with
tfhe-rs.
Object lifetime is handled through an Arc<Mutex<T>> wrapper, which enforces
that all objects currently alive in the HPU hardware are also kept valid on
the host side.
It contains the second version of HPU instruction set (HIS_V2.0):
* DOp have following properties:
+ Template as first class citizen
+ Support of Immediate template
+ Direct parser and conversion between Asm/Hex
+ Replace deku (and its associated endianness limitation) with
+ bitfield_struct and manual parsing
* IOp have following properties:
+ Support various number of Destination
+ Support various number of Sources
+ Support various number of Immediate values
+ Support of multiple bitwidth (Not implemented yet in the Fpga
firmware)
Details can be viewed in `backends/tfhe-hpu-backend/Readme.md`
94 lines
2.4 KiB
Python
94 lines
2.4 KiB
Python
# A library to load mockup trace files
|
|
import json
|
|
|
|
import pandas
|
|
|
|
from . import analysis, fmt
|
|
|
|
|
|
class ArgId:
    """Lightweight attribute view over an argument-id dict.

    Every key of *d* becomes an instance attribute (``d['x']`` is read
    back as ``self.x``).
    """

    def __init__(self, d):
        # Copy the entries instead of aliasing the caller's dict
        # (``self.__dict__ = d``): aliasing means later attribute writes
        # on this object silently mutate *d*, and it is inconsistent
        # with Instruction, which already uses ``update``.
        self.__dict__.update(d)
|
class Instruction:
    """One decoded instruction pulled from a trace entry.

    All keys of *d* become attributes; the three argument-id sub-dicts
    are promoted to ArgId views and the raw 'op' word is parsed into an
    fmt.Insn.
    """

    def __init__(self, d):
        self.__dict__.update(d)
        # Wrap each argument-id dict in an ArgId attribute view.
        for field in ('dst_id', 'srca_id', 'srcb_id'):
            setattr(self, field, ArgId(getattr(self, field)))
        self.insn = fmt.Insn(d['op'])

    def __str__(self):
        return str(self.insn)
|
class Slot:
    """A scheduler slot: one instruction together with its current state."""

    def __init__(self, d):
        self.state = d['state']
        self.insn_data = Instruction(d['inst'])

    def __str__(self):
        return str(self.insn_data)

    def to_analysis(self):
        """Delegate conversion to the wrapped instruction's Insn."""
        return self.insn_data.insn.to_analysis()
|
# The only two subtypes
|
|
class Query:
    """A slot query event; its 'cmd' field selects the analysis subtype."""

    def __init__(self, event):
        self.__dict__.update(event)
        slot = Slot(self.slot)
        self.slot = slot
        self.subtype = self.cmd
        self.desc = str(slot)

    def to_analysis(self):
        # The subtype string names the matching class in the analysis module.
        ctor = getattr(analysis, self.subtype)
        return ctor(self.slot.to_analysis())
|
class ReqTimeout:
    """Event recording that a request timed out at *timestamp*."""

    def __init__(self, timestamp):
        self.timestamp = timestamp

    def to_analysis(self):
        """Mirror this event into its analysis-side counterpart."""
        return analysis.ReqTimeout(self.timestamp)
|
class BatchStart:
    """Event marking the start of a batch on a processing element."""

    def __init__(self, d):
        # Keep only the two fields the analysis layer consumes.
        self.pe_id, self.issued = d['pe_id'], d['issued']

    def to_analysis(self):
        return analysis.BatchStart(self.pe_id, self.issued)
|
class NamedEvent:
    """Bare event identified only by a name string.

    Converts to the analysis class of the same name, constructed with no
    arguments.
    """

    def __init__(self, name):
        self.name = name

    def to_analysis(self):
        # Look up analysis.<name> and instantiate it.
        return getattr(analysis, self.name)()
|
class Event:
    """One timestamped trace entry.

    The JSON 'event' payload is either a single-key dict — the key names
    a class in this module (Query, ReqTimeout, BatchStart, ...) and the
    value carries its constructor argument — or a bare string naming a
    parameterless event.
    """

    def __init__(self, trace_dict):
        self.timestamp = trace_dict['timestamp']
        event = trace_dict['event']

        # isinstance instead of ``event.__class__ == dict``: idiomatic
        # type test, and also accepts dict subclasses. json.load only
        # ever yields plain dicts here, so behavior is unchanged.
        if isinstance(event, dict):
            key = next(iter(event.keys()))
            # Dispatch on the key to the class of the same name.
            self.event = globals()[key](event[key])
        else:
            self.event = NamedEvent(event)

    def to_analysis(self):
        return analysis.Event(
            timestamp=self.timestamp,
            data=self.event.to_analysis())
|
class Trace:
    """A full mockup trace loaded from a JSON file."""

    def __init__(self, jsonfile):
        with open(jsonfile, 'r') as fd:
            entries = json.load(fd)
        self.traces = [Event(entry) for entry in entries]

    def __iter__(self):
        return iter(self.traces)

    def to_analysis(self):
        """Convert every event and wrap the stream in analysis.Trace."""
        return analysis.Trace(e.to_analysis() for e in self)
|
def from_mockup(filename: str) -> 'analysis.Trace':
    """Load a mockup JSON trace file and convert it to an analysis.Trace."""
    trace = Trace(filename)
    return trace.to_analysis()
|
# Register from_mockup as an alternate constructor on analysis.Trace, so
# analysis code can load mockup traces without importing this module.
analysis.Trace.from_mockup = from_mockup