diff --git a/packages/ERTP/src/paymentLedger.js b/packages/ERTP/src/paymentLedger.js index 64522ba0a79d..cc5cd2dbadc9 100644 --- a/packages/ERTP/src/paymentLedger.js +++ b/packages/ERTP/src/paymentLedger.js @@ -139,6 +139,7 @@ export const vivifyPaymentLedger = ( const paymentLedger = provideDurableWeakMapStore( issuerBaggage, 'paymentLedger', + { valueShape: amountShape }, ); /** diff --git a/packages/SwingSet/src/liveslots/collectionManager.js b/packages/SwingSet/src/liveslots/collectionManager.js index b30e1ec4a56a..6c59dffe6ae5 100644 --- a/packages/SwingSet/src/liveslots/collectionManager.js +++ b/packages/SwingSet/src/liveslots/collectionManager.js @@ -5,6 +5,7 @@ import { assertKeyPattern, assertPattern, matches, + fit, compareRank, M, zeroPad, @@ -13,6 +14,8 @@ import { isEncodedRemotable, makeCopySet, makeCopyMap, + mustCompress, + decompress, } from '@agoric/store'; import { Far, passStyleOf } from '@endo/marshal'; import { decodeToJustin } from '@endo/marshal/src/marshal-justin.js'; @@ -213,7 +216,7 @@ export function makeCollectionManager( return storeKindInfo[kindName].kindID; } - // Not that it's only used for this purpose, what should it be called? + // Now that it's only used for this purpose, what should it be called? // TODO Should we be using the new encodeBigInt scheme instead, anyway? const BIGINT_TAG_LEN = 10; @@ -257,6 +260,23 @@ export function makeCollectionManager( const dbKeyPrefix = `vc.${collectionID}.`; let currentGenerationNumber = 0; + const keyLabel = `invalid key type for collection ${q(label)}`; + const valueLabel = `invalid value type for collection ${q(label)}`; + + const serializeValue = value => { + if (valueShape === undefined) { + return serialize(value); + } + return serialize(mustCompress(value, valueShape, valueLabel)); + }; + + const unserializeValue = data => { + if (valueShape === undefined) { + return unserialize(data); + } + return decompress(unserialize(data), valueShape); + }; + function prefix(dbEntryKey) { return `${dbKeyPrefix}${dbEntryKey}`; } @@ -331,11 +351,10 @@ export function makeCollectionManager( } function get(key) { - matches(key, keyShape) || - assert.fail(X`invalid key type for collection ${q(label)}`); + fit(key, keyShape, keyLabel); const result = syscall.vatstoreGet(keyToDBKey(key)); if (result) { - return unserialize(JSON.parse(result)); + return unserializeValue(JSON.parse(result)); } assert.fail(X`key ${key} not found in collection ${q(label)}`); } @@ -351,16 +370,11 @@ export function makeCollectionManager( } function init(key, value) { - matches(key, keyShape) || - assert.fail(X`invalid key type for collection ${q(label)}`); + fit(key, keyShape, keyLabel); !has(key) || assert.fail(X`key ${key} already registered in collection ${q(label)}`); - if (valueShape) { - matches(value, valueShape) || - assert.fail(X`invalid value type for collection ${q(label)}`); - } currentGenerationNumber += 1; - const serializedValue = serialize(value); + const serializedValue = serializeValue(value); assertAcceptableSyscallCapdataSize([serializedValue]); if (durable) { serializedValue.slots.forEach((vref, slotIndex) => { @@ -388,13 +402,8 @@ export function makeCollectionManager( } function set(key, value) { - matches(key, keyShape) || - assert.fail(X`invalid key type for collection ${q(label)}`); - if (valueShape) { - matches(value, valueShape) || - assert.fail(X`invalid value type for collection ${q(label)}`); - } - const after = serialize(harden(value)); + fit(key, keyShape, keyLabel); + const after = serializeValue(harden(value)); 
assertAcceptableSyscallCapdataSize([after]); if (durable) { after.slots.forEach((vref, i) => { @@ -412,8 +421,7 @@ export function makeCollectionManager( } function deleteInternal(key) { - matches(key, keyShape) || - assert.fail(X`invalid key type for collection ${q(label)}`); + fit(key, keyShape, keyLabel); const dbKey = keyToDBKey(key); const rawValue = syscall.vatstoreGet(dbKey); assert(rawValue, X`key ${key} not found in collection ${q(label)}`); @@ -472,7 +480,7 @@ export function makeCollectionManager( if (dbKey < end) { priorDBKey = dbKey; if (ignoreKeys) { - const value = unserialize(JSON.parse(dbValue)); + const value = unserializeValue(JSON.parse(dbValue)); if (matches(value, valuePatt)) { yield [undefined, value]; } @@ -484,7 +492,7 @@ export function makeCollectionManager( } else { const key = dbKeyToKey(dbKey); if (matches(key, keyPatt)) { - const value = unserialize(JSON.parse(dbValue)); + const value = unserializeValue(JSON.parse(dbValue)); if (matches(value, valuePatt)) { yield [key, value]; } diff --git a/packages/SwingSet/test/stores/test-collections.js b/packages/SwingSet/test/stores/test-collections.js index 0750dc99a3bb..c70604412404 100644 --- a/packages/SwingSet/test/stores/test-collections.js +++ b/packages/SwingSet/test/stores/test-collections.js @@ -174,7 +174,9 @@ test('constrain map key shape', t => { t.is(stringsOnly.get('skey'), 'this should work'); t.throws( () => stringsOnly.init(29, 'this should not work'), - m('invalid key type for collection "map key strings only"'), + m( + 'invalid key type for collection "map key strings only": number 29 - Must be a string', + ), ); const noStrings = makeScalarBigMapStore('map key no strings', { @@ -184,27 +186,31 @@ test('constrain map key shape', t => { noStrings.init(true, 'boolean ok'); t.throws( () => noStrings.init('foo', 'string not ok?'), - m('invalid key type for collection "map key no strings"'), + m( + 'invalid key type for collection "map key no strings": "foo" - Must fail negated pattern: "[match:string]"', + ), ); t.is(noStrings.get(47), 'number ok'); t.is(noStrings.get(true), 'boolean ok'); t.falsy(noStrings.has('foo')); t.throws( () => noStrings.get('foo'), - m('invalid key type for collection "map key no strings"'), + m( + 'invalid key type for collection "map key no strings": "foo" - Must fail negated pattern: "[match:string]"', + ), ); const only47 = makeScalarBigMapStore('map key only 47', { keyShape: 47 }); only47.init(47, 'this number ok'); t.throws( () => only47.init(29, 'this number not ok?'), - m('invalid key type for collection "map key only 47"'), + m('invalid key type for collection "map key only 47": 29 - Must be: 47'), ); t.is(only47.get(47), 'this number ok'); t.falsy(only47.has(29)); t.throws( () => only47.get(29), - m('invalid key type for collection "map key only 47"'), + m('invalid key type for collection "map key only 47": 29 - Must be: 47'), ); const lt47 = makeScalarBigMapStore('map key less than 47', { @@ -213,13 +219,17 @@ test('constrain map key shape', t => { lt47.init(29, 'this number ok'); t.throws( () => lt47.init(53, 'this number not ok?'), - m('invalid key type for collection "map key less than 47"'), + m( + 'invalid key type for collection "map key less than 47": 53 - Must be < 47', + ), ); t.is(lt47.get(29), 'this number ok'); t.falsy(lt47.has(53)); t.throws( () => lt47.get(53), - m('invalid key type for collection "map key less than 47"'), + m( + 'invalid key type for collection "map key less than 47": 53 - Must be < 47', + ), ); lt47.init(11, 'lower value'); 
lt47.init(46, 'higher value'); @@ -235,7 +245,9 @@ test('constrain map value shape', t => { t.is(stringsOnly.get('sval'), 'string value'); t.throws( () => stringsOnly.init('nval', 29), - m('invalid value type for collection "map value strings only"'), + m( + 'invalid value type for collection "map value strings only": number 29 - Must be a string', + ), ); const noStrings = makeScalarBigMapStore('map value no strings', { @@ -245,7 +257,9 @@ test('constrain map value shape', t => { noStrings.init('bkey', true); t.throws( () => noStrings.init('skey', 'string not ok?'), - m('invalid value type for collection "map value no strings"'), + m( + 'invalid value type for collection "map value no strings": "string not ok?" - Must fail negated pattern: "[match:string]"', + ), ); t.is(noStrings.get('nkey'), 47); t.is(noStrings.get('bkey'), true); @@ -257,7 +271,9 @@ test('constrain map value shape', t => { only47.init('47key', 47); t.throws( () => only47.init('29key', 29), - m('invalid value type for collection "map value only 47"'), + m( + 'invalid value type for collection "map value only 47": 29 - Must be: 47', + ), ); t.is(only47.get('47key'), 47); t.falsy(only47.has('29key')); @@ -268,7 +284,9 @@ test('constrain map value shape', t => { lt47.init('29key', 29); t.throws( () => lt47.init('53key', 53), - m('invalid value type for collection "map value less than 47"'), + m( + 'invalid value type for collection "map value less than 47": 53 - Must be < 47', + ), ); t.is(lt47.get('29key'), 29); t.falsy(lt47.has('53key')); @@ -288,7 +306,9 @@ test('constrain set key shape', t => { t.truthy(stringsOnly.has('skey')); t.throws( () => stringsOnly.add(29), - m('invalid key type for collection "strings only set"'), + m( + 'invalid key type for collection "strings only set": number 29 - Must be a string', + ), ); const noStrings = makeScalarBigSetStore('no strings set', { @@ -298,7 +318,9 @@ test('constrain set key shape', t => { noStrings.add(true); t.throws( () => noStrings.add('foo?'), - m('invalid key type for collection "no strings set"'), + m( + 'invalid key type for collection "no strings set": "foo?" 
- Must fail negated pattern: "[match:string]"',
+      ),
    );
    t.truthy(noStrings.has(47));
    t.truthy(noStrings.has(true));
@@ -311,7 +333,7 @@ test('constrain set key shape', t => {
    t.falsy(only47.has(29));
    t.throws(
      () => only47.add(29),
-    m('invalid key type for collection "only 47 set"'),
+    m('invalid key type for collection "only 47 set": 29 - Must be: 47'),
    );

    const lt47 = makeScalarBigSetStore('less than 47 set', {
@@ -320,7 +342,7 @@
    lt47.add(29);
    t.throws(
      () => lt47.add(53),
-    m('invalid key type for collection "less than 47 set"'),
+    m('invalid key type for collection "less than 47 set": 53 - Must be < 47'),
    );
    t.truthy(lt47.has(29));
    t.falsy(lt47.has(53));
diff --git a/packages/store/src/index.js b/packages/store/src/index.js
index bf5b306f4bd3..ec9cd6f8223d 100755
--- a/packages/store/src/index.js
+++ b/packages/store/src/index.js
@@ -56,6 +56,8 @@ export {
   fit,
 } from './patterns/patternMatchers.js';

+export { compress, mustCompress, decompress } from './patterns/compress.js';
+
 export {
   defendPrototype,
   initEmpty,
diff --git a/packages/store/src/patterns/compress.js b/packages/store/src/patterns/compress.js
new file mode 100644
index 000000000000..6ec9d6881408
--- /dev/null
+++ b/packages/store/src/patterns/compress.js
@@ -0,0 +1,248 @@
+// @ts-check
+import { assertChecker, makeTagged, passStyleOf } from '@endo/marshal';
+
+import { recordParts } from './rankOrder.js';
+import {
+  kindOf,
+  assertPattern,
+  maybeMatchHelper,
+  matches,
+  checkMatches,
+} from './patternMatchers.js';
+import { isKey } from '../keys/checkKey.js';
+import { keyEQ } from '../keys/compareKeys.js';
+
+const { fromEntries } = Object;
+const { details: X, quote: q } = assert;
+
+/**
+ * When, for example, all the specimens in a given store match a
+ * specific pattern, then each of those specimens must contain the same
+ * literal superstructure as their one shared pattern. Therefore, storing
+ * that literal superstructure would be redundant. If `specimen` does
+ * match `pattern`, then `compress(specimen, pattern)` will return a bindings
+ * array which is hopefully more compact than `specimen` as a whole, but
+ * carries all the information from `specimen` that cannot be derived just
+ * from knowledge that it matches this `pattern`.
+ *
+ * @type {Compress}
+ */
+export const compress = (specimen, pattern) => {
+  // Not yet frozen!
Used to accumulate bindings + const bindings = []; + const emitBinding = binding => { + bindings.push(binding); + }; + harden(emitBinding); + + /** + * @param {Passable} innerSpecimen + * @param {Pattern} innerPattern + * @returns {boolean} + */ + const compressRecur = (innerSpecimen, innerPattern) => { + assertPattern(innerPattern); + if (isKey(innerPattern)) { + return keyEQ(innerSpecimen, innerPattern); + } + const patternKind = kindOf(innerPattern); + const specimenKind = kindOf(innerSpecimen); + switch (patternKind) { + case undefined: { + return false; + } + case 'copyArray': { + if ( + specimenKind !== 'copyArray' || + innerSpecimen.length !== innerPattern.length + ) { + return false; + } + return innerPattern.every((p, i) => compressRecur(innerSpecimen[i], p)); + } + case 'copyRecord': { + if (specimenKind !== 'copyRecord') { + return false; + } + const [specimenNames, specimenValues] = recordParts(innerSpecimen); + const [pattNames, pattValues] = recordParts(innerPattern); + if (specimenNames.length !== pattNames.length) { + return false; + } + return pattNames.every( + (name, i) => + specimenNames[i] === name && + compressRecur(specimenValues[i], pattValues[i]), + ); + } + case 'copyMap': { + if (specimenKind !== 'copyMap') { + return false; + } + const { + payload: { keys: pattKeys, values: valuePatts }, + } = innerPattern; + const { + payload: { keys: specimenKeys, values: specimenValues }, + } = innerSpecimen; + // TODO BUG: this assumes that the keys appear in the + // same order, so we can compare values in that order. + // However, we're only guaranteed that they appear in + // the same rankOrder. Thus we must search one of these + // in the other's rankOrder. + if (!keyEQ(specimenKeys, pattKeys)) { + return false; + } + return compressRecur(specimenValues, valuePatts); + } + default: + { + const matchHelper = maybeMatchHelper(patternKind); + if (matchHelper) { + if (matchHelper.compress) { + const subBindings = matchHelper.compress( + innerSpecimen, + innerPattern.payload, + compress, + ); + if (subBindings === undefined) { + return false; + } else { + // Note that we're not flattening the subBindings + // Note that as long as we allow this kind of nested compression, + // we cannot feasibly preserve sort order anyway. + emitBinding(subBindings); + return true; + } + } else if (matches(innerSpecimen, innerPattern)) { + emitBinding(innerSpecimen); + return true; + } else { + return false; + } + } + } + assert.fail(X`unrecognized kind: ${q(patternKind)}`); + } + }; + + if (compressRecur(specimen, pattern)) { + return harden(bindings); + } else { + return undefined; + } +}; +harden(compress); + +/** + * `mustCompress` is to `compress` approximately as `fit` is to `matches`. + * Where `compress` indicates pattern match failure by returning `undefined`, + * `mustCompress` indicates pattern match failure by throwing an error + * with a good pattern-match-failure diagnostic. Thus, like `fit`, + * `mustCompress` has an additional optional `label` parameter to be used on + * the outside of that diagnostic if needed. If `mustCompress` does return + * normally, then the pattern match succeeded and `mustCompress` returns a + * valid bindings array. 
+ * + * @type {MustCompress} + */ +export const mustCompress = (specimen, pattern, label = undefined) => { + const bindings = compress(specimen, pattern); + if (bindings !== undefined) { + return bindings; + } + // should only throw + checkMatches(specimen, pattern, assertChecker, label); + assert.fail(X`internal: ${label}: inconsistent pattern match: ${q(pattern)}`); +}; +harden(mustCompress); + +/** + * `decompress` reverses the compression performed by `compress` + * or `mustCompress`, in order to recover the equivalent + * of the original specimen from the `bindings` array and the `pattern`. + * + * @type {Decompress} + */ +export const decompress = (bindings, pattern) => { + passStyleOf(bindings) === 'copyArray' || + assert.fail(X`Pattern ${pattern} expected bindings array: ${bindings}`); + let i = 0; + const takeBinding = () => { + i < bindings.length || + assert.fail( + X`Pattern ${q(pattern)} expects more than ${q( + bindings.length, + )} bindings: ${bindings}`, + ); + const binding = bindings[i]; + i += 1; + return binding; + }; + harden(takeBinding); + + const decompressRecur = innerPattern => { + assertPattern(innerPattern); + if (isKey(innerPattern)) { + return innerPattern; + } + const patternKind = kindOf(innerPattern); + switch (patternKind) { + case undefined: { + assert.fail(X`decompress expected a pattern: ${q(innerPattern)}`); + } + case 'copyArray': { + return harden(innerPattern.map(p => decompressRecur(p))); + } + case 'copyRecord': { + const [pattNames, pattValues] = recordParts(innerPattern); + return harden( + fromEntries( + pattNames.map((name, j) => [name, decompressRecur(pattValues[j])]), + ), + ); + } + case 'copyMap': { + const { + payload: { keys: pattKeys, values: valuePatts }, + } = innerPattern; + return makeTagged( + 'copyMap', + harden({ + keys: pattKeys, + values: valuePatts.map(p => decompressRecur(p)), + }), + ); + } + default: + { + const matchHelper = maybeMatchHelper(patternKind); + if (matchHelper) { + if (matchHelper.decompress) { + const subBindings = takeBinding(); + passStyleOf(subBindings) === 'copyArray' || + assert.fail( + X`Pattern ${q( + innerPattern, + )} expected nested bindings array: ${subBindings}`, + ); + + return matchHelper.decompress( + subBindings, + innerPattern.payload, + decompress, + ); + } else { + return takeBinding(); + } + } + } + assert.fail( + X`unrecognized pattern kind: ${q(patternKind)} ${q(innerPattern)}`, + ); + } + }; + + return decompressRecur(pattern); +}; +harden(decompress); diff --git a/packages/store/src/patterns/patternMatchers.js b/packages/store/src/patterns/patternMatchers.js index 2cb1dfdc9ba1..34004edcce8a 100644 --- a/packages/store/src/patterns/patternMatchers.js +++ b/packages/store/src/patterns/patternMatchers.js @@ -30,6 +30,8 @@ import { checkCopyMap, copyMapKeySet, checkCopyBag, + makeCopySet, + makeCopyBag, } from '../keys/checkKey.js'; /// @@ -136,6 +138,15 @@ const checkDecimalDigitsLimit = (specimen, decimalDigitsLimit, check) => { ); }; +/** + * @typedef {string} Kind + * It is either a PassStyle other than 'tagged', or, if the underlying + * PassStyle is 'tagged', then the `getTag` value for tags that are + * recognized at the store level of abstraction. For each of those + * tags, a tagged record only has that kind if it satisfies the invariants + * that the store level associates with that kind. 
+ */ + /** * @returns {PatternKit} */ @@ -151,15 +162,6 @@ const makePatternKit = () => { // eslint-disable-next-line no-use-before-define HelpersByMatchTag[tag]; - /** - * @typedef {string} Kind - * It is either a PassStyle other than 'tagged', or, if the underlying - * PassStyle is 'tagged', then the `getTag` value for tags that are - * recognized at the store level of abstraction. For each of those - * tags, a tagged record only has that kind if it satisfies the invariants - * that the store level associates with that kind. - */ - /** * @type {WeakMap} * Only for tagged records of recognized kinds whose store-level invariants @@ -256,6 +258,16 @@ const makePatternKit = () => { return false; }; + /** + * Checks only recognized kinds, and only if the specimen + * passes the invariants associated with that recognition. + * + * @param {Passable} specimen + * @param {Kind} kind + * @returns {boolean} + */ + const isKind = (specimen, kind) => checkKind(specimen, kind, identChecker); + /** * @param {Passable} specimen * @param {Key} keyAsPattern @@ -554,6 +566,11 @@ const makePatternKit = () => { const pattValues = pattPayload.values; const specimenValues = specimenPayload.values; // compare values as copyArrays + // TODO BUG: this assumes that the keys appear in the + // same order, so we can compare values in that order. + // However, we're only guaranteed that they appear in + // the same rankOrder. Thus we must search one of these + // in the other's rankOrder. return checkMatches(specimenValues, pattValues, check); } default: { @@ -703,8 +720,15 @@ const makePatternKit = () => { return getPassStyleCover(passStyle); }; + /** + * @param {Passable[]} array + * @param {Pattern} patt + * @param {Checker} check + * @param {string} [labelPrefix] + * @returns {boolean} + */ const arrayEveryMatchPattern = (array, patt, check, labelPrefix = '') => { - if (checkKind(patt, 'match:any', identChecker)) { + if (isKind(patt, 'match:any')) { // if the pattern is M.any(), we know its true return true; } @@ -713,6 +737,42 @@ const makePatternKit = () => { ); }; + /** + * @param { Passable[] } array + * @param { Pattern } patt + * @param {Compress} compress + * @returns {Passable[] | undefined} + */ + const arrayCompressMatchPattern = (array, patt, compress) => { + if (isKind(patt, 'match:any')) { + return array; + } + const bindings = []; + for (const el of array) { + const subBindings = compress(el, patt); + if (subBindings) { + // Note: not flattened + bindings.push(subBindings); + } else { + return undefined; + } + } + return harden(bindings); + }; + + /** + * @param {Passable[]} bindings + * @param {Pattern} patt + * @param {Decompress} decompress + * @returns {Passable[]} + */ + const arrayDecompressMatchPattern = (bindings, patt, decompress) => { + if (isKind(patt, 'match:any')) { + return bindings; + } + return harden(bindings.map(subBindings => decompress(subBindings, patt))); + }; + // /////////////////////// Match Helpers ///////////////////////////////////// /** @type {MatchHelper} */ @@ -1001,7 +1061,7 @@ const makePatternKit = () => { const matchRemotableHelper = Far('match:remotable helper', { checkMatches: (specimen, remotableDesc, check) => { // Unfortunate duplication of checkKind logic, but no better choices. 
- if (checkKind(specimen, 'remotable', identChecker)) { + if (isKind(specimen, 'remotable')) { return true; } if (check === identChecker) { @@ -1180,6 +1240,20 @@ const makePatternKit = () => { ); }, + compress: (specimen, [subPatt, limits = undefined], compress) => { + const { arrayLengthLimit } = limit(limits); + if ( + isKind(specimen, 'copyArray') && + specimen.length <= arrayLengthLimit + ) { + return arrayCompressMatchPattern(specimen, subPatt, compress); + } + return undefined; + }, + + decompress: (bindings, [subPatt, _limits = undefined], decompress) => + arrayDecompressMatchPattern(bindings, subPatt, decompress), + checkIsWellFormed: (payload, check) => checkIsWellFormedWithLimit( payload, @@ -1210,6 +1284,20 @@ const makePatternKit = () => { ); }, + compress: (specimen, [keyPatt, limits = undefined], compress) => { + const { numSetElementsLimit } = limit(limits); + if ( + isKind(specimen, 'copySet') && + specimen.payload.length <= numSetElementsLimit + ) { + return arrayCompressMatchPattern(specimen.payload, keyPatt, compress); + } + return undefined; + }, + + decompress: (bindings, [keyPatt, _limits = undefined], decompress) => + makeCopySet(arrayDecompressMatchPattern(bindings, keyPatt, decompress)), + checkIsWellFormed: (payload, check) => checkIsWellFormedWithLimit( payload, @@ -1253,6 +1341,41 @@ const makePatternKit = () => { ); }, + compress: ( + specimen, + [keyPatt, countPatt, limits = undefined], + compress, + ) => { + const { numUniqueBagElementsLimit, decimalDigitsLimit } = limit(limits); + if ( + isKind(specimen, 'copyBag') && + specimen.payload.length <= numUniqueBagElementsLimit && + specimen.payload.every(([_key, count]) => + checkDecimalDigitsLimit(count, decimalDigitsLimit, identChecker), + ) + ) { + return arrayCompressMatchPattern( + specimen.payload, + harden([keyPatt, countPatt]), + compress, + ); + } + return undefined; + }, + + decompress: ( + bindings, + [keyPatt, countPatt, _limits = undefined], + decompress, + ) => + makeCopyBag( + arrayDecompressMatchPattern( + bindings, + harden([keyPatt, countPatt]), + decompress, + ), + ), + checkIsWellFormed: (payload, check) => checkIsWellFormedWithLimit( payload, @@ -1298,6 +1421,46 @@ const makePatternKit = () => { ); }, + compress: ( + specimen, + [keyPatt, valuePatt, limits = undefined], + compress, + ) => { + const { numMapEntriesLimit } = limit(limits); + if ( + isKind(specimen, 'copyMap') && + specimen.payload.keys.length <= numMapEntriesLimit + ) { + return harden([ + arrayCompressMatchPattern(specimen.payload.keys, keyPatt, compress), + arrayCompressMatchPattern( + specimen.payload.values, + valuePatt, + compress, + ), + ]); + } + return undefined; + }, + + decompress: ( + [keyBindings, valueBindings], + [keyPatt, valuePatt, _limits = undefined], + decompress, + ) => { + return makeTagged( + 'copyMap', + harden({ + keys: arrayDecompressMatchPattern(keyBindings, keyPatt, decompress), + values: arrayDecompressMatchPattern( + valueBindings, + valuePatt, + decompress, + ), + }), + ); + }, + checkIsWellFormed: (payload, check) => checkIsWellFormedWithLimit( payload, @@ -1660,6 +1823,8 @@ const makePatternKit = () => { assertKeyPattern, isKeyPattern, getRankCover, + kindOf, + maybeMatchHelper, M, }); }; @@ -1679,6 +1844,8 @@ export const { assertKeyPattern, isKeyPattern, getRankCover, + kindOf, + maybeMatchHelper, M, } = makePatternKit(); diff --git a/packages/store/src/types.js b/packages/store/src/types.js index d0d0b1707b9c..d503a5abe522 100644 --- a/packages/store/src/types.js +++ 
b/packages/store/src/types.js
@@ -8,6 +8,7 @@
 /** @template T @typedef {import('@endo/marshal').CopyRecord} CopyRecord */
 /** @template T @typedef {import('@endo/marshal').CopyArray} CopyArray */
 /** @typedef {import('@endo/marshal').Checker} Checker */
+/** @typedef {import('./patterns/patternMatchers').Kind} Kind */

 /**
  * @typedef {Passable} Key
@@ -626,11 +627,35 @@
  * @property {(patt: Pattern) => void} assertKeyPattern
  * @property {(patt: Passable) => boolean} isKeyPattern
  * @property {GetRankCover} getRankCover
+ * @property {(passable: Passable, check?: Checker) => (Kind | undefined)} kindOf
+ * @property {(tag: string) => (MatchHelper | undefined)} maybeMatchHelper
  * @property {MatcherNamespace} M
  */

 // /////////////////////////////////////////////////////////////////////////////

+/**
+ * @callback Compress
+ * @param {Passable} specimen
+ * @param {Pattern} pattern
+ * @returns {Passable[] | undefined}
+ */
+
+/**
+ * @callback MustCompress
+ * @param {Passable} specimen
+ * @param {Pattern} pattern
+ * @param {string|number} [label]
+ * @returns {Passable[]}
+ */
+
+/**
+ * @callback Decompress
+ * @param {Passable[]} bindings
+ * @param {Pattern} pattern
+ * @returns {Passable}
+ */
+
 // TODO
 // The following type should be in internal-types.js, since the
 // `MatchHelper` type is purely internal to this package. However,
@@ -657,6 +682,27 @@
  * Assuming a valid Matcher of this type with `matcherPayload` as its
  * payload, does this specimen match that Matcher?
  *
+ * @property {(specimen: Passable,
+ *   matcherPayload: Passable,
+ *   compress: Compress
+ * ) => (Passable[] | undefined)} [compress]
+ * Assuming a valid Matcher of this type with `matcherPayload` as its
+ * payload, if this specimen matches this matcher, then return a
+ * "bindings" array of passables that represents this specimen,
+ * perhaps more compactly, given the knowledge that it matches this matcher.
+ * If the specimen does not match the matcher, return undefined.
+ * If this matcher has a `compress` method, then it must have a matching
+ * `decompress` method.
+ *
+ * @property {(bindings: Passable[],
+ *   matcherPayload: Passable,
+ *   decompress: Decompress
+ * ) => Passable} [decompress]
+ * If `bindings` is the result of a successful `compress` with this matcher,
+ * then `decompress` must return a Passable equivalent to the original specimen.
+ * If this matcher has a `decompress` method, then it must have a matching
+ * `compress` method.
+ * * @property {( * payload: Passable, * encodePassable: KeyToDBKey diff --git a/packages/store/test/test-compress.js b/packages/store/test/test-compress.js new file mode 100644 index 000000000000..7562a2cdd19f --- /dev/null +++ b/packages/store/test/test-compress.js @@ -0,0 +1,135 @@ +// @ts-check + +import { test } from '@agoric/swingset-vat/tools/prepare-test-env-ava.js'; +import { Far } from '@endo/marshal'; +import { + makeCopyBagFromElements, + makeCopyMap, + makeCopySet, +} from '../src/keys/checkKey.js'; +import { + compress, + decompress, + mustCompress, +} from '../src/patterns/compress.js'; +import { M } from '../src/patterns/patternMatchers.js'; + +const runTests = testTriple => { + const brand = Far('simoleans', {}); + const moolaBrand = Far('moola', {}); + const timer = Far('timer', {}); + + testTriple({ brand, value: 37n }, { brand, value: M.bigint() }, [37n]); + testTriple( + { brand, value: 37n }, + { brand: M.remotable(), value: M.bigint() }, + [37n, brand], + ); + testTriple( + { brand, value: 37n }, + { brand: M.bigint(), value: M.bigint() }, + undefined, + 'test mustCompress: brand: remotable "[Alleged: simoleans]" - Must be a bigint', + ); + testTriple({ brand, value: 37n }, M.any(), [{ brand, value: 37n }]); + testTriple({ brand, value: 37n }, M.recordOf(M.string(), M.scalar()), [ + { brand, value: 37n }, + ]); + testTriple( + [{ foo: 'a' }, { foo: 'b' }, { foo: 'c' }], + M.arrayOf(harden({ foo: M.string() })), + [[['a'], ['b'], ['c']]], + ); + testTriple( + makeCopySet([{ foo: 'a' }, { foo: 'b' }, { foo: 'c' }]), + M.setOf(harden({ foo: M.string() })), + [[['c'], ['b'], ['a']]], + ); + testTriple( + makeCopyBagFromElements([{ foo: 'a' }, { foo: 'a' }, { foo: 'c' }]), + M.bagOf(harden({ foo: M.string() })), + [ + [ + ['c', 1n], + ['a', 2n], + ], + ], + ); + testTriple( + makeCopyMap([ + [{ foo: 'a' }, { bar: 1 }], + [{ foo: 'b' }, { bar: 2 }], + [{ foo: 'c' }, { bar: 3 }], + ]), + M.mapOf(harden({ foo: M.string() }), harden({ bar: M.number() })), + [ + [ + [['c'], ['b'], ['a']], + [[3], [2], [1]], + ], + ], + ); + testTriple( + makeCopyMap([ + [{ foo: 'c' }, { bar: 3 }], + [{ foo: 'b' }, { bar: 2 }], + [{ foo: 'a' }, { bar: 1 }], + ]), + // TODO Add a test case where the keys are in the same rankOrder but not + // the same order. 
+ makeCopyMap([ + [{ foo: 'c' }, M.any()], + // @ts-expect-error The array need not be generic + [{ foo: 'b' }, { bar: M.number() }], + [{ foo: 'a' }, { bar: 1 }], + ]), + [{ bar: 3 }, 2], + ); + testTriple( + { + want: { Winnings: { brand: moolaBrand, value: ['x', 'y'] } }, + give: { Bid: { brand, value: 37n } }, + exit: { afterDeadline: { deadline: 11n, timer } }, + }, + { + want: { Winnings: { brand: moolaBrand, value: M.array() } }, + give: { Bid: { brand, value: M.nat() } }, + exit: { afterDeadline: { deadline: M.gte(10n), timer } }, + }, + [['x', 'y'], 37n, 11n], + ); +}; + +test('compression', t => { + const testCompress = (specimen, pattern, bindings) => + t.deepEqual(compress(harden(specimen), harden(pattern)), harden(bindings)); + runTests(testCompress); +}); + +test('test mustCompress', t => { + const testCompress = (specimen, pattern, bindings, message) => { + if (bindings === undefined) { + t.throws( + () => + mustCompress(harden(specimen), harden(pattern), 'test mustCompress'), + { message }, + ); + } else { + t.deepEqual( + mustCompress(harden(specimen), harden(pattern), 'test mustCompress'), + harden(bindings), + ); + } + }; + runTests(testCompress); +}); + +test('decompression', t => { + const testDecompress = (specimen, pattern, bindings) => + bindings === undefined || + t.deepEqual( + decompress(harden(bindings), harden(pattern)), + harden(specimen), + ); + runTests(testDecompress); +}); diff --git a/packages/vat-data/src/vat-data-bindings.js b/packages/vat-data/src/vat-data-bindings.js index 85c16f941bf3..11c7d4fa7258 100644 --- a/packages/vat-data/src/vat-data-bindings.js +++ b/packages/vat-data/src/vat-data-bindings.js @@ -122,16 +122,20 @@ harden(partialAssign); */ export const provide = provideLazy; -export const provideDurableMapStore = (baggage, name) => - provide(baggage, name, () => makeScalarBigMapStore(name, { durable: true })); +export const provideDurableMapStore = (baggage, name, options = {}) => + provide(baggage, name, () => + makeScalarBigMapStore(name, { durable: true, ...options }), + ); harden(provideDurableMapStore); -export const provideDurableWeakMapStore = (baggage, name) => +export const provideDurableWeakMapStore = (baggage, name, options = {}) => provide(baggage, name, () => - makeScalarBigWeakMapStore(name, { durable: true }), + makeScalarBigWeakMapStore(name, { durable: true, ...options }), ); harden(provideDurableWeakMapStore); -export const provideDurableSetStore = (baggage, name) => - provide(baggage, name, () => makeScalarBigSetStore(name, { durable: true })); +export const provideDurableSetStore = (baggage, name, options = {}) => + provide(baggage, name, () => + makeScalarBigSetStore(name, { durable: true, ...options }), + ); harden(provideDurableSetStore);
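Reviewer note (not part of the patch): a minimal round-trip sketch of the new `compress`/`mustCompress`/`decompress` exports, mirroring the record case exercised in test-compress.js above. It assumes a SES environment where `harden` is global, plus the `@agoric/store` re-exports added in index.js.

```js
import { Far } from '@endo/marshal';
import { M, compress, mustCompress, decompress } from '@agoric/store';

const brand = Far('simoleans', {});
const pattern = harden({ brand: M.remotable(), value: M.bigint() });
const specimen = harden({ brand, value: 37n });

// compress keeps only what the pattern cannot predict, as a bindings array.
const bindings = compress(specimen, pattern);
// per the test expectations above: harden([37n, brand])

// mustCompress is to compress as fit is to matches: on a failed match it
// throws with a fit-style diagnostic (prefixed with the label) instead of
// returning undefined.
mustCompress(specimen, pattern, 'demo label');

// decompress recovers an equivalent of the original specimen from the
// bindings plus the shared pattern: keyEQ(roundTrip, specimen) holds.
const roundTrip = decompress(bindings, pattern);
```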
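A second sketch, showing the nesting behavior that the `arrayCompressMatchPattern` comments call out: a matcher's `compress` contributes a single unflattened binding slot holding one bindings array per matched element (values again taken from test-compress.js).

```js
import { M, compress, decompress } from '@agoric/store';

const elementShape = harden({ foo: M.string() });
const arrayShape = M.arrayOf(elementShape);
const specimen = harden([{ foo: 'a' }, { foo: 'b' }, { foo: 'c' }]);

const bindings = compress(specimen, arrayShape);
// harden([[['a'], ['b'], ['c']]]): one outer slot for the M.arrayOf
// matcher, containing one inner bindings array per element. As the
// compress.js note says, allowing this kind of nested compression means
// sort order of bindings cannot feasibly be preserved.

decompress(bindings, arrayShape);
// => [{ foo: 'a' }, { foo: 'b' }, { foo: 'c' }] again
```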
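Finally, a sketch of what a collection with a `valueShape` now persists, shown with the pattern-level helpers rather than a real vat. The `amountShape` here is a hypothetical stand-in for the shape `vivifyPaymentLedger` passes via `provideDurableWeakMapStore`; in a vat, `serializeValue`/`unserializeValue` in collectionManager.js perform these steps around `vatstoreSet`/`vatstoreGet`.

```js
import { Far } from '@endo/marshal';
import { M, mustCompress, decompress } from '@agoric/store';

const brand = Far('simoleans', {});
// Hypothetical amount shape; the real one comes from the issuer.
const amountShape = harden({ brand: M.remotable(), value: M.nat() });

// What serializeValue() now hands to serialize() on init/set:
const stored = mustCompress(
  harden({ brand, value: 37n }),
  amountShape,
  'invalid value type for collection "paymentLedger"',
);
// stored is [37n, brand]: the record superstructure is implied by
// amountShape and omitted from the vatstore entry.

// What unserializeValue() does after unserialize() on get/iteration:
decompress(stored, amountShape); // => { brand, value: 37n }
```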