cCreateSparkSpendTransaction tweaks

julian
2023-12-20 13:48:41 -06:00
parent a9567841c7
commit a9698eaf4d
6 changed files with 124 additions and 59 deletions

View File

@@ -260,7 +260,7 @@ abstract final class LibSpark {
/// Returns the serialized spark spend.
///
static ({
- String serializedSpendPayload,
+ Uint8List serializedSpendPayload,
List<Uint8List> outputScripts,
int fee,
}) createSparkSendTransaction({
@@ -276,7 +276,14 @@ abstract final class LibSpark {
String memo
})>
privateRecipients,
- required List<(String, String)> serializedCoins,
+ required List<
+ ({
+ String serializedCoin,
+ String serializedCoinContext,
+ int groupId,
+ int height,
+ })>
+ serializedCoins,
required List<
({
int setId,
@@ -284,6 +291,12 @@ abstract final class LibSpark {
List<({String serializedCoin, String txHash})> set
})>
allAnonymitySets,
+ required List<
+ ({
+ int setId,
+ Uint8List blockHash,
+ })>
+ idAndBlockHashes,
}) {
final privateKeyPtr =
privateKeyHex.to32BytesFromHex().unsignedCharPointer();
@@ -315,21 +328,27 @@ abstract final class LibSpark {
privateRecipients[i].sparkAddress.toNativeUtf8().cast<Char>();
}
- final serializedCoinsPtr = malloc.allocate<CCDataStream>(
- sizeOf<CCDataStream>() * serializedCoins.length);
- final serializedCoinContextsPtr = malloc.allocate<CCDataStream>(
- sizeOf<CCDataStream>() * serializedCoins.length);
+ final serializedCoinsPtr = malloc.allocate<DartSpendCoinData>(
+ sizeOf<DartSpendCoinData>() * serializedCoins.length);
for (int i = 0; i < serializedCoins.length; i++) {
- // take sublist as tx hash is also appended here for some reason
- final b64CoinDecoded =
- base64Decode(serializedCoins[i].$1).sublist(0, 244);
- serializedCoinsPtr[i].data = b64CoinDecoded.unsignedCharPointer();
- serializedCoinsPtr[i].length = b64CoinDecoded.length;
+ final b64CoinDecoded = base64Decode(serializedCoins[i].serializedCoin);
+ serializedCoinsPtr[i].serializedCoin =
+ malloc.allocate<CCDataStream>(sizeOf<CCDataStream>());
+ serializedCoinsPtr[i].serializedCoin.ref.data =
+ b64CoinDecoded.unsignedCharPointer();
+ serializedCoinsPtr[i].serializedCoin.ref.length = b64CoinDecoded.length;
- final b64ContextDecoded = base64Decode(serializedCoins[i].$2);
- serializedCoinContextsPtr[i].data =
+ final b64ContextDecoded =
+ base64Decode(serializedCoins[i].serializedCoinContext);
+ serializedCoinsPtr[i].serializedCoinContext =
+ malloc.allocate<CCDataStream>(sizeOf<CCDataStream>());
+ serializedCoinsPtr[i].serializedCoinContext.ref.data =
b64ContextDecoded.unsignedCharPointer();
- serializedCoinContextsPtr[i].length = b64ContextDecoded.length;
+ serializedCoinsPtr[i].serializedCoinContext.ref.length =
+ b64ContextDecoded.length;
+ serializedCoinsPtr[i].groupId = serializedCoins[i].groupId;
+ serializedCoinsPtr[i].height = serializedCoins[i].height;
}
final coverSetDataAllPtr = malloc.allocate<CCoverSetData>(
@@ -355,6 +374,15 @@ abstract final class LibSpark {
coverSetDataAllPtr[i].cover_set_representationLength = setHash.length;
}
+ final idAndBlockHashesPtr = malloc.allocate<BlockHashAndId>(
+ sizeOf<BlockHashAndId>() * idAndBlockHashes.length);
+ for (int i = 0; i < idAndBlockHashes.length; i++) {
+ assert(idAndBlockHashes[i].blockHash.length == 32);
+ idAndBlockHashesPtr[i].id = idAndBlockHashes[i].setId;
+ idAndBlockHashesPtr[i].hash =
+ idAndBlockHashes[i].blockHash.unsignedCharPointer();
+ }
final result = _bindings.cCreateSparkSpendTransaction(
privateKeyPtr,
index,
@@ -364,9 +392,10 @@ abstract final class LibSpark {
privateRecipients.length,
serializedCoinsPtr,
serializedCoins.length,
- serializedCoinContextsPtr,
coverSetDataAllPtr,
allAnonymitySets.length,
+ idAndBlockHashesPtr,
+ idAndBlockHashes.length,
);
// todo: more comprehensive frees
@@ -374,7 +403,6 @@ abstract final class LibSpark {
malloc.free(recipientsPtr);
malloc.free(privateRecipientsPtr);
malloc.free(serializedCoinsPtr);
- malloc.free(serializedCoinContextsPtr);
malloc.free(coverSetDataAllPtr);
if (result.address == nullptr.address) {
@@ -384,10 +412,11 @@ abstract final class LibSpark {
}
final messageBytes = result.ref.data.toUint8List(result.ref.dataLength);
- final message = utf8.decode(messageBytes);
malloc.free(result.ref.data);
if (result.ref.isError > 0) {
+ final message = utf8.decode(messageBytes);
throw Exception(message);
}
@@ -403,7 +432,11 @@ abstract final class LibSpark {
malloc.free(result.ref.outputScripts);
- return (serializedSpendPayload: message, fee: fee, outputScripts: scripts);
+ return (
+ serializedSpendPayload: messageBytes,
+ fee: fee,
+ outputScripts: scripts
+ );
}
static ({int changeToMint, List<LibSparkCoin> coins}) getCoinsToSpend({
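
Editor's note: a few things change for callers of this wrapper. Each spend input is now a richer record (serialized coin, serialization context, anonymity-set group id, and mint height), a per-set 32-byte block hash list is required, and the spend payload comes back as Uint8List rather than String. Below is a minimal sketch of the two new record shapes, assuming Dart 3 record syntax; the typedef names and every value are placeholders for illustration, and only the field names and types come from the signature above.

import 'dart:typed_data';

// Hypothetical aliases for the record shapes shown in the new signature.
typedef SpendCoinInput = ({
  String serializedCoin,
  String serializedCoinContext,
  int groupId,
  int height
});

typedef SetBlockHash = ({int setId, Uint8List blockHash});

void main() {
  // Each spend input now carries its serialization context, anonymity-set
  // group id, and mint height alongside the base64-encoded coin itself.
  final coins = <SpendCoinInput>[
    (
      serializedCoin: 'base64Coin==', // placeholder base64 data
      serializedCoinContext: 'base64Context==', // placeholder base64 data
      groupId: 1,
      height: 800000, // placeholder mint height
    ),
  ];

  // One 32-byte block hash per anonymity set id; the wrapper asserts the
  // length before handing the bytes across the FFI boundary.
  final idAndBlockHashes = <SetBlockHash>[
    (setId: 1, blockHash: Uint8List(32)),
  ];

  print('${coins.length} coin(s), ${idAndBlockHashes.length} block hash(es)');
}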

View File

@@ -136,11 +136,12 @@ class FlutterLibsparkmobileBindings {
int recipientsLength,
ffi.Pointer<COutputRecipient> privateRecipients,
int privateRecipientsLength,
- ffi.Pointer<CCDataStream> serializedCoins,
- int serializedCoinsLength,
- ffi.Pointer<CCDataStream> serializedCoinContexts,
+ ffi.Pointer<DartSpendCoinData> coins,
+ int coinsLength,
ffi.Pointer<CCoverSetData> cover_set_data_all,
int cover_set_data_allLength,
+ ffi.Pointer<BlockHashAndId> idAndBlockHashes,
+ int idAndBlockHashesLength,
) {
return _cCreateSparkSpendTransaction(
keyData,
@@ -149,11 +150,12 @@ class FlutterLibsparkmobileBindings {
recipientsLength,
privateRecipients,
privateRecipientsLength,
- serializedCoins,
- serializedCoinsLength,
- serializedCoinContexts,
+ coins,
+ coinsLength,
cover_set_data_all,
cover_set_data_allLength,
+ idAndBlockHashes,
+ idAndBlockHashesLength,
);
}
@@ -166,10 +168,11 @@ class FlutterLibsparkmobileBindings {
ffi.Int,
ffi.Pointer<COutputRecipient>,
ffi.Int,
- ffi.Pointer<CCDataStream>,
+ ffi.Pointer<DartSpendCoinData>,
ffi.Int,
- ffi.Pointer<CCDataStream>,
ffi.Pointer<CCoverSetData>,
+ ffi.Int,
+ ffi.Pointer<BlockHashAndId>,
ffi.Int)>>('cCreateSparkSpendTransaction');
late final _cCreateSparkSpendTransaction =
_cCreateSparkSpendTransactionPtr.asFunction<
@@ -180,10 +183,11 @@ class FlutterLibsparkmobileBindings {
int,
ffi.Pointer<COutputRecipient>,
int,
- ffi.Pointer<CCDataStream>,
+ ffi.Pointer<DartSpendCoinData>,
int,
- ffi.Pointer<CCDataStream>,
ffi.Pointer<CCoverSetData>,
+ int,
+ ffi.Pointer<BlockHashAndId>,
int)>();
ffi.Pointer<GetSparkCoinsResult> getCoinsToSpend(
@@ -603,3 +607,22 @@ final class ValidateAddressResult extends ffi.Struct {
external ffi.Pointer<ffi.Char> errorMessage;
}
+ final class BlockHashAndId extends ffi.Struct {
+ external ffi.Pointer<ffi.UnsignedChar> hash;
+ @ffi.Int()
+ external int id;
+ }
+ final class DartSpendCoinData extends ffi.Struct {
+ external ffi.Pointer<CCDataStream> serializedCoin;
+ external ffi.Pointer<CCDataStream> serializedCoinContext;
+ @ffi.Int()
+ external int groupId;
+ @ffi.Int()
+ external int height;
+ }
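
Editor's note: each DartSpendCoinData element now owns two separately malloc'd CCDataStream structs, plus the byte buffers they point to, so freeing only the outer array (as flagged by the "todo: more comprehensive frees" note in the wrapper above) leaks the nested allocations. A minimal cleanup sketch follows; freeSpendCoinData is a hypothetical helper, not part of this commit, and it assumes the generated bindings above are in scope alongside dart:ffi and package:ffi.

import 'dart:ffi';

import 'package:ffi/ffi.dart';
// ...plus the generated bindings above, which define DartSpendCoinData
// and CCDataStream.

// Hypothetical helper: release the nested CCDataStream structs and their
// data buffers before freeing the outer DartSpendCoinData array.
void freeSpendCoinData(Pointer<DartSpendCoinData> coins, int length) {
  for (int i = 0; i < length; i++) {
    malloc.free(coins[i].serializedCoin.ref.data);
    malloc.free(coins[i].serializedCoin);
    malloc.free(coins[i].serializedCoinContext.ref.data);
    malloc.free(coins[i].serializedCoinContext);
  }
  malloc.free(coins);
}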

View File

@@ -192,11 +192,12 @@ SparkSpendTransactionResult* cCreateSparkSpendTransaction(
int recipientsLength,
struct COutputRecipient* privateRecipients,
int privateRecipientsLength,
- struct CCDataStream* serializedCoins,
- int serializedCoinsLength,
- struct CCDataStream* serializedCoinContexts,
+ struct DartSpendCoinData* coins,
+ int coinsLength,
struct CCoverSetData* cover_set_data_all,
- int cover_set_data_allLength
+ int cover_set_data_allLength,
+ struct BlockHashAndId* idAndBlockHashes,
+ int idAndBlockHashesLength
) {
try {
// Derive the keys from the key data and index.
@@ -223,37 +224,35 @@ SparkSpendTransactionResult* cCreateSparkSpendTransaction(
// Convert CCSparkMintMeta* serializedMintMetas to std::list<CSparkMintMeta> cppCoins.
std::list<CSparkMintMeta> cppCoins;
- for (int i = 0; i < serializedCoinsLength; i++) {
- std::vector<unsigned char> vec(serializedCoins[i].data, serializedCoins[i].data + serializedCoins[i].length);
- CDataStream stream(vec, SER_NETWORK, PROTOCOL_VERSION);
- spark::Coin coin;
- stream >> coin;
- std::vector<unsigned char> contextVec(serializedCoinContexts[i].data, serializedCoinContexts[i].data + serializedCoinContexts[i].length);
+ for (int i = 0; i < coinsLength; i++) {
+ spark::Coin coin = deserializeCoin(coins[i].serializedCoin->data, coins[i].serializedCoin->length);
+ std::vector<unsigned char> contextVec(coins[i].serializedCoinContext->data, coins[i].serializedCoinContext->data + coins[i].serializedCoinContext->length);
coin.setSerialContext(contextVec);
CSparkMintMeta meta = getMetadata(coin, incomingViewKey);
+ meta.nId = coins[i].groupId;
+ meta.nHeight = coins[i].height;
meta.coin = coin;
cppCoins.push_back(meta);
}
// Convert CCoverSets* cover_set_data_all to a std::unordered_map<uint64_t, spark::CoverSetData> cppCoverSetDataAll
- // TODO verify correctness.
std::unordered_map<uint64_t, spark::CoverSetData> cppCoverSetDataAll;
for (int i = 0; i < cover_set_data_allLength; i++) {
+ spark::CoverSetData cppCoverSetData;
for (int j = 0; j < cover_set_data_all[i].cover_setLength; j++) {
- std::vector<spark::Coin> cppCoverSetCoins;
- spark::Coin coin = coinFromCCDataStream(cover_set_data_all[i].cover_set[j]);
- cppCoverSetCoins.push_back(coin);
- // Construct spark::CoverSetData.
- spark::CoverSetData cppCoverSetData;
- cppCoverSetData.cover_set = cppCoverSetCoins;
- cppCoverSetData.cover_set_representation = std::vector<unsigned char>(cover_set_data_all[i].cover_set_representation, cover_set_data_all[i].cover_set_representation + cover_set_data_all[i].cover_set_representationLength);
- cppCoverSetDataAll[cover_set_data_all[i].setId] = cppCoverSetData;
+ spark::Coin coin = deserializeCoin(cover_set_data_all[i].cover_set[j].data, cover_set_data_all[i].cover_set[j].length);
+ cppCoverSetData.cover_set.push_back(coin);
}
+ cppCoverSetData.cover_set_representation = std::vector<unsigned char>(cover_set_data_all[i].cover_set_representation, cover_set_data_all[i].cover_set_representation + cover_set_data_all[i].cover_set_representationLength);
+ cppCoverSetDataAll[cover_set_data_all[i].setId] = cppCoverSetData;
}
+ std::map<uint64_t, uint256> cppIdAndBlockHashesAll;
+ for (int i = 0; i < idAndBlockHashesLength; i++) {
+ std::vector<unsigned char> vec(idAndBlockHashes[i].hash, idAndBlockHashes[i].hash + 32);
+ cppIdAndBlockHashesAll[idAndBlockHashes[i].id] = uint256(vec);
+ }
// Required but unused params.
- std::map<uint64_t, uint256> cppIdAndBlockHashesAll;
uint256 cppTxHashSig;
// Output data

View File

@@ -71,11 +71,12 @@ struct SparkSpendTransactionResult* cCreateSparkSpendTransaction(
int recipientsLength,
struct COutputRecipient* privateRecipients,
int privateRecipientsLength,
- struct CCDataStream* serializedCoins,
- int serializedCoinsLength,
- struct CCDataStream* serializedCoinContexts,
+ struct DartSpendCoinData* coins,
+ int coinsLength,
struct CCoverSetData* cover_set_data_all,
- int cover_set_data_allLength
+ int cover_set_data_allLength,
+ struct BlockHashAndId* idAndBlockHashes,
+ int idAndBlockHashesLength
);
FFI_PLUGIN_EXPORT

View File

@@ -237,6 +237,18 @@ struct ValidateAddressResult {
char *errorMessage;
};
+ struct BlockHashAndId {
+ unsigned char* hash;
+ int id;
+ };
+ struct DartSpendCoinData {
+ struct CCDataStream* serializedCoin;
+ struct CCDataStream* serializedCoinContext;
+ int groupId;
+ int height;
+ };
//#ifdef __cplusplus
//}
//#endif

View File

@@ -65,10 +65,7 @@ spark::SpendKey createSpendKeyFromData(unsigned char *keyData, int index) {
spark::Coin coinFromCCDataStream(CCDataStream& cdStream) {
- spark::Coin coin;
- std::vector<unsigned char> vec(cdStream.data, cdStream.data + cdStream.length);
- CDataStream coinStream(vec, SER_NETWORK, PROTOCOL_VERSION);
- coinStream >> coin;
+ spark::Coin coin = deserializeCoin(cdStream.data, cdStream.length);
return coin;
}