{
"cells": [
{
"cell_type": "markdown",
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67",
"metadata": {
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67"
},
"source": [
"\n",
"## Generalized Inverse\n",
"\n",
"We show how to use EZKL to prove that we know matrices $A$ and its generalized inverse $B$. Since these are large we deal with the KZG commitments, with $a$ the polycommit of $A$, $b$ the polycommit of $B$, and $ABA = A$.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "95613ee9",
"metadata": {
"id": "95613ee9"
},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"\n",
"# here we create and (potentially train a model)\n",
"\n",
"# make sure you have the dependencies required here already installed\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import torch"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9LgqGF56Qcdz",
"metadata": {
"id": "9LgqGF56Qcdz"
},
"outputs": [],
"source": [
"class GeneralizedInverseProof(nn.Module):\n",
" def __init__(self):\n",
" super(GeneralizedInverseProof, self).__init__()\n",
" self.relu = nn.ReLU()\n",
"\n",
" def forward(self,A,B):\n",
" # some expression of tolerance to error in the inference\n",
" return torch.sum(torch.abs(A@B@A - A)) < 0.1\n",
"\n",
"circuit = GeneralizedInverseProof()"
]
},
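{
"cell_type": "markdown",
"id": "pinv-demo-md",
"metadata": {
"id": "pinv-demo-md"
},
"source": [
"A quick, illustrative sanity check of the tolerance test above (a sketch; the matrix `M` here is just an illustration and is not used elsewhere in the notebook). The Moore-Penrose pseudoinverse of a non-square matrix, computed with `torch.linalg.pinv`, is one choice of generalized inverse, so the check should return `True` for it and, with overwhelming probability, `False` for an unrelated random matrix.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "pinv-demo",
"metadata": {
"id": "pinv-demo"
},
"outputs": [],
"source": [
"M = torch.rand(3, 5)\n",
"M_pinv = torch.linalg.pinv(M)  # Moore-Penrose pseudoinverse, a generalized inverse of M\n",
"\n",
"print(circuit.forward(M, M_pinv))            # expected: tensor(True)\n",
"print(circuit.forward(M, torch.rand(5, 3)))  # expected (almost surely): tensor(False)"
]
},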
{
"cell_type": "code",
"execution_count": null,
"id": "YRQLvvsXVs9s",
"metadata": {
"id": "YRQLvvsXVs9s"
},
"outputs": [],
"source": [
"gip_run_args = ezkl.PyRunArgs()\n",
"gip_run_args.ignore_range_check_inputs_outputs = True\n",
"gip_run_args.input_visibility = \"polycommit\" # matrix and generalized inverse commitments\n",
"gip_run_args.output_visibility = \"fixed\" # no parameters used\n",
"gip_run_args.param_visibility = \"fixed\" # should be Tensor(True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b37637c4",
"metadata": {
"id": "b37637c4"
},
"outputs": [],
"source": [
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82db373a",
"metadata": {
"id": "82db373a"
},
"outputs": [],
"source": [
"# After training, export to onnx (network.onnx) and create a data file (input.json)\n",
"shape = [10, 10]\n",
"\n",
"A = 0.1*torch.rand(1,*shape, requires_grad=True)\n",
"B = A.inverse()\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" (A,B), # model input (or a tuple for multiple inputs)\n",
" model_path, # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input1', 'input2'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input1' : {0 : 'batch_size'},\n",
" 'input2' : {0 : 'batch_size'},\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"d0 = ((A).detach().numpy()).reshape([-1]).tolist()\n",
"d1 = ((B).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(\n",
" input_data=[d0, d1],\n",
")\n",
"\n",
" # Serialize data into file:\n",
"json.dump( data, open(data_path, 'w' ))\n"
]
},
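{
"cell_type": "code",
"execution_count": null,
"id": "onnx-check",
"metadata": {
"id": "onnx-check"
},
"outputs": [],
"source": [
"# Optional sanity check of the export (a sketch, assuming the onnx package is available\n",
"# locally as well as on colab): load the graph and run the ONNX checker on it.\n",
"import onnx\n",
"\n",
"onnx_model = onnx.load(model_path)\n",
"onnx.checker.check_model(onnx_model)\n",
"print([i.name for i in onnx_model.graph.input], [o.name for o in onnx_model.graph.output])"
]
},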
{
"cell_type": "code",
"execution_count": null,
"id": "HOLcdGx4eQ9n",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "HOLcdGx4eQ9n",
"outputId": "cd0a4f10-251e-492e-9f05-d8af0d79c86a"
},
"outputs": [],
"source": [
"circuit.forward(A,B)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5e374a2",
"metadata": {
"colab": {
"background_save": true,
"base_uri": "https://localhost:8080/"
},
"id": "d5e374a2",
"outputId": "11ae5963-02d4-4939-9c98-d126071a9ba0"
},
"outputs": [],
"source": [
"\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=gip_run_args)\n",
"\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cal_path = os.path.join(\"calibration.json\")\n",
"\n",
"data_array = (0.1*torch.rand(20,*shape).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
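{
"cell_type": "code",
"execution_count": null,
"id": "inspect-settings",
"metadata": {
"id": "inspect-settings"
},
"outputs": [],
"source": [
"# Optionally peek at the calibrated settings (a sketch: it assumes settings.json is plain JSON\n",
"# with a \"run_args\" section holding the visibility flags, and just lists the top-level keys otherwise).\n",
"settings = json.load(open(settings_path))\n",
"print(json.dumps(settings.get(\"run_args\", list(settings.keys())), indent=2))"
]
},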
{
"cell_type": "code",
"execution_count": null,
"id": "3aa4f090",
"metadata": {
"id": "3aa4f090"
},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8b74dcee",
"metadata": {
"id": "8b74dcee"
},
"outputs": [],
"source": [
"# srs path\n",
"res = await ezkl.get_srs( settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18c8b7c7",
"metadata": {
"id": "18c8b7c7"
},
"outputs": [],
"source": [
"# now generate the witness file\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
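{
"cell_type": "code",
"execution_count": null,
"id": "inspect-witness",
"metadata": {
"id": "inspect-witness"
},
"outputs": [],
"source": [
"# A quick look at the witness (a sketch: it assumes the witness is plain JSON with \"inputs\"\n",
"# and \"outputs\" fields, as ezkl's gen_witness typically produces, and only prints what it finds).\n",
"witness = json.load(open(witness_path))\n",
"print(list(witness.keys()))\n",
"print(witness.get(\"outputs\"))  # should encode the boolean result of the A@B@A == A check"
]
},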
{
"cell_type": "code",
"execution_count": null,
"id": "b1c561a8",
"metadata": {
"id": "b1c561a8"
},
"outputs": [],
"source": [
"\n",
"# we pass the witness file to the setup function so as to prepopulate the \"fixed\" columns of the circuit. \n",
"# in this case we want to force the output to be 0 meaning that the difference between the two matrices is 0\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" witness_path = witness_path,\n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c384cbc8",
"metadata": {
"id": "c384cbc8"
},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76f00d41",
"metadata": {
"id": "76f00d41"
},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
}
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}