diff --git a/packages/ERTP/src/paymentLedger.js b/packages/ERTP/src/paymentLedger.js
index 19702d7a44f..dccab6b843b 100644
--- a/packages/ERTP/src/paymentLedger.js
+++ b/packages/ERTP/src/paymentLedger.js
@@ -35,6 +35,7 @@ const amountShapeFromElementShape = (brand, assetKind, elementShape) => {
if (elementShape === undefined) {
valueShape = M.arrayOf(M.key());
} else {
+ // M.and compresses only according to its last conjunct
valueShape = M.arrayOf(M.and(M.key(), elementShape));
}
break;
diff --git a/packages/store/src/index.js b/packages/store/src/index.js
index 12fb207c787..17bf131921a 100755
--- a/packages/store/src/index.js
+++ b/packages/store/src/index.js
@@ -54,6 +54,8 @@ export {
mustMatch,
} from './patterns/patternMatchers.js';
+export { compress, mustCompress, decompress } from './patterns/compress.js';
+
export {
initEmpty,
defineExoClass,
diff --git a/packages/store/src/patterns/compress.js b/packages/store/src/patterns/compress.js
new file mode 100644
index 00000000000..720b75afe15
--- /dev/null
+++ b/packages/store/src/patterns/compress.js
@@ -0,0 +1,255 @@
+// @ts-check
+import { assertChecker, makeTagged, passStyleOf } from '@endo/marshal';
+import { recordNames, recordValues } from '@endo/marshal/src/encodePassable.js';
+
+import {
+ kindOf,
+ assertPattern,
+ maybeMatchHelper,
+ matches,
+ checkMatches,
+} from './patternMatchers.js';
+import { isKey } from '../keys/checkKey.js';
+import { keyEQ } from '../keys/compareKeys.js';
+
+const { fromEntries } = Object;
+const { details: X, quote: q } = assert;
+
+/**
+ * When, for example, all the specimens in a given store match a
+ * specific pattern, then each of those specimens must contain the same
+ * literal superstructure as their one shared pattern. Therefore, storing
+ * that literal superstructure would be redundant. If `specimen` does
+ * match `pattern`, then `compress(specimen, pattern)` will return a bindings
+ * array which is hopefully more compact than `specimen` as a whole, but
+ * carries all the information from specimen that cannot be derived just
+ * from knowledge that it matches this `pattern`.
+ *
+ * @type {Compress}
+ */
+export const compress = (specimen, pattern) => {
+ // Not yet frozen! Used to accumulate bindings
+ const bindings = [];
+ const emitBinding = binding => {
+ bindings.push(binding);
+ };
+ harden(emitBinding);
+
+ /**
+ * @param {Passable} innerSpecimen
+ * @param {Pattern} innerPattern
+ * @returns {boolean}
+ */
+ const compressRecur = (innerSpecimen, innerPattern) => {
+ assertPattern(innerPattern);
+ if (isKey(innerPattern)) {
+ return keyEQ(innerSpecimen, innerPattern);
+ }
+ const patternKind = kindOf(innerPattern);
+ const specimenKind = kindOf(innerSpecimen);
+ switch (patternKind) {
+ case undefined: {
+ return false;
+ }
+ case 'copyArray': {
+ if (
+ specimenKind !== 'copyArray' ||
+ innerSpecimen.length !== innerPattern.length
+ ) {
+ return false;
+ }
+ return innerPattern.every((p, i) => compressRecur(innerSpecimen[i], p));
+ }
+ case 'copyRecord': {
+ if (specimenKind !== 'copyRecord') {
+ return false;
+ }
+ const specimenNames = recordNames(innerSpecimen);
+ const pattNames = recordNames(innerPattern);
+
+ if (specimenNames.length !== pattNames.length) {
+ return false;
+ }
+ const specimenValues = recordValues(innerSpecimen, specimenNames);
+ const pattValues = recordValues(innerPattern, pattNames);
+
+ return pattNames.every(
+ (name, i) =>
+ specimenNames[i] === name &&
+ compressRecur(specimenValues[i], pattValues[i]),
+ );
+ }
+ case 'copyMap': {
+ if (specimenKind !== 'copyMap') {
+ return false;
+ }
+ const {
+ payload: { keys: pattKeys, values: valuePatts },
+ } = innerPattern;
+ const {
+ payload: { keys: specimenKeys, values: specimenValues },
+ } = innerSpecimen;
+ // TODO BUG: this assumes that the keys appear in the
+ // same order, so we can compare values in that order.
+ // However, we're only guaranteed that they appear in
+ // the same rankOrder. Thus we must search one of these
+ // in the other's rankOrder.
+ if (!keyEQ(specimenKeys, pattKeys)) {
+ return false;
+ }
+ return compressRecur(specimenValues, valuePatts);
+ }
+ default:
+ {
+ const matchHelper = maybeMatchHelper(patternKind);
+ if (matchHelper) {
+ if (matchHelper.compress) {
+ const subBindings = matchHelper.compress(
+ innerSpecimen,
+ innerPattern.payload,
+ compress,
+ );
+ if (subBindings === undefined) {
+ return false;
+ } else {
+ // Note that we're not flattening the subBindings
+ // Note that as long as we allow this kind of nested compression,
+ // we cannot feasibly preserve sort order anyway.
+ emitBinding(subBindings);
+ return true;
+ }
+ } else if (matches(innerSpecimen, innerPattern)) {
+ emitBinding(innerSpecimen);
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+ assert.fail(X`unrecognized kind: ${q(patternKind)}`);
+ }
+ };
+
+ if (compressRecur(specimen, pattern)) {
+ return harden(bindings);
+ } else {
+ return undefined;
+ }
+};
+harden(compress);
+
+/**
+ * `mustCompress` is to `compress` approximately as `fit` is to `matches`.
+ * Where `compress` indicates pattern match failure by returning `undefined`,
+ * `mustCompress` indicates pattern match failure by throwing an error
+ * with a good pattern-match-failure diagnostic. Thus, like `fit`,
+ * `mustCompress` has an additional optional `label` parameter to be used on
+ * the outside of that diagnostic if needed. If `mustCompress` does return
+ * normally, then the pattern match succeeded and `mustCompress` returns a
+ * valid bindings array.
+ *
+ * @type {MustCompress}
+ */
+export const mustCompress = (specimen, pattern, label = undefined) => {
+ const bindings = compress(specimen, pattern);
+ if (bindings !== undefined) {
+ return bindings;
+ }
+ // should only throw
+ checkMatches(specimen, pattern, assertChecker, label);
+ assert.fail(X`internal: ${label}: inconsistent pattern match: ${q(pattern)}`);
+};
+harden(mustCompress);
+
+/**
+ * `decompress` reverses the compression performed by `compress`
+ * or `mustCompress`, in order to recover the equivalent
+ * of the original specimen from the `bindings` array and the `pattern`.
+ *
+ * @type {Decompress}
+ */
+export const decompress = (bindings, pattern) => {
+ passStyleOf(bindings) === 'copyArray' ||
+ assert.fail(X`Pattern ${pattern} expected bindings array: ${bindings}`);
+ let i = 0;
+ const takeBinding = () => {
+ i < bindings.length ||
+ assert.fail(
+ X`Pattern ${q(pattern)} expects more than ${q(
+ bindings.length,
+ )} bindings: ${bindings}`,
+ );
+ const binding = bindings[i];
+ i += 1;
+ return binding;
+ };
+ harden(takeBinding);
+
+ const decompressRecur = innerPattern => {
+ assertPattern(innerPattern);
+ if (isKey(innerPattern)) {
+ return innerPattern;
+ }
+ const patternKind = kindOf(innerPattern);
+ switch (patternKind) {
+ case undefined: {
+ assert.fail(X`decompress expected a pattern: ${q(innerPattern)}`);
+ }
+ case 'copyArray': {
+ return harden(innerPattern.map(p => decompressRecur(p)));
+ }
+ case 'copyRecord': {
+ const pattNames = recordNames(innerPattern);
+ const pattValues = recordValues(innerPattern, pattNames);
+ const entries = pattNames.map((name, j) => [
+ name,
+ decompressRecur(pattValues[j]),
+ ]);
+ // Reverse so printed form looks less surprising,
+      // with ascending rather than descending property names.
+ return harden(fromEntries(entries.reverse()));
+ }
+ case 'copyMap': {
+ const {
+ payload: { keys: pattKeys, values: valuePatts },
+ } = innerPattern;
+ return makeTagged(
+ 'copyMap',
+ harden({
+ keys: pattKeys,
+ values: valuePatts.map(p => decompressRecur(p)),
+ }),
+ );
+ }
+ default:
+ {
+ const matchHelper = maybeMatchHelper(patternKind);
+ if (matchHelper) {
+ if (matchHelper.decompress) {
+ const subBindings = takeBinding();
+ passStyleOf(subBindings) === 'copyArray' ||
+ assert.fail(
+ X`Pattern ${q(
+ innerPattern,
+ )} expected nested bindings array: ${subBindings}`,
+ );
+
+ return matchHelper.decompress(
+ subBindings,
+ innerPattern.payload,
+ decompress,
+ );
+ } else {
+ return takeBinding();
+ }
+ }
+ }
+ assert.fail(
+ X`unrecognized pattern kind: ${q(patternKind)} ${q(innerPattern)}`,
+ );
+ }
+ };
+
+ return decompressRecur(pattern);
+};
+harden(decompress);
diff --git a/packages/store/src/patterns/patternMatchers.js b/packages/store/src/patterns/patternMatchers.js
index 2563ff03f7e..a7845438642 100644
--- a/packages/store/src/patterns/patternMatchers.js
+++ b/packages/store/src/patterns/patternMatchers.js
@@ -12,6 +12,7 @@ import {
applyLabelingError,
fromUniqueEntries,
listDifference,
+ objectMap,
} from '@agoric/internal';
import {
@@ -31,6 +32,8 @@ import {
checkCopyMap,
copyMapKeySet,
checkCopyBag,
+ makeCopySet,
+ makeCopyBag,
} from '../keys/checkKey.js';
///
@@ -142,6 +145,15 @@ const checkDecimalDigitsLimit = (specimen, decimalDigitsLimit, check) => {
);
};
+/**
+ * @typedef {string} Kind
+ * It is either a PassStyle other than 'tagged', or, if the underlying
+ * PassStyle is 'tagged', then the `getTag` value for tags that are
+ * recognized at the store level of abstraction. For each of those
+ * tags, a tagged record only has that kind if it satisfies the invariants
+ * that the store level associates with that kind.
+ */
+
/**
* @returns {PatternKit}
*/
@@ -157,15 +169,6 @@ const makePatternKit = () => {
// eslint-disable-next-line no-use-before-define
HelpersByMatchTag[tag];
- /**
- * @typedef {string} Kind
- * It is either a PassStyle other than 'tagged', or, if the underlying
- * PassStyle is 'tagged', then the `getTag` value for tags that are
- * recognized at the store level of abstraction. For each of those
- * tags, a tagged record only has that kind if it satisfies the invariants
- * that the store level associates with that kind.
- */
-
/**
* @type {WeakMap}
* Only for tagged records of recognized kinds whose store-level invariants
@@ -262,6 +265,16 @@ const makePatternKit = () => {
return false;
};
+ /**
+ * Checks only recognized kinds, and only if the specimen
+ * passes the invariants associated with that recognition.
+ *
+ * @param {Passable} specimen
+ * @param {Kind} kind
+ * @returns {boolean}
+ */
+ const isKind = (specimen, kind) => checkKind(specimen, kind, identChecker);
+
/**
* @param {Passable} specimen
* @param {Key} keyAsPattern
@@ -493,6 +506,11 @@ const makePatternKit = () => {
const pattValues = pattPayload.values;
const specimenValues = specimenPayload.values;
// compare values as copyArrays
+ // TODO BUG: this assumes that the keys appear in the
+ // same order, so we can compare values in that order.
+ // However, we're only guaranteed that they appear in
+ // the same rankOrder. Thus we must search one of these
+ // in the other's rankOrder.
return checkMatches(specimenValues, pattValues, check);
}
default: {
@@ -642,8 +660,15 @@ const makePatternKit = () => {
return getPassStyleCover(passStyle);
};
+ /**
+ * @param {Passable[]} array
+ * @param {Pattern} patt
+ * @param {Checker} check
+ * @param {string} [labelPrefix]
+ * @returns {boolean}
+ */
const arrayEveryMatchPattern = (array, patt, check, labelPrefix = '') => {
- if (checkKind(patt, 'match:any', identChecker)) {
+ if (isKind(patt, 'match:any')) {
// if the pattern is M.any(), we know its true
return true;
}
@@ -652,6 +677,42 @@ const makePatternKit = () => {
);
};
+ /**
+ * @param { Passable[] } array
+ * @param { Pattern } patt
+ * @param {Compress} compress
+ * @returns {Passable[] | undefined}
+ */
+ const arrayCompressMatchPattern = (array, patt, compress) => {
+ if (isKind(patt, 'match:any')) {
+ return array;
+ }
+ const bindings = [];
+ for (const el of array) {
+ const subBindings = compress(el, patt);
+ if (subBindings) {
+ // Note: not flattened
+ bindings.push(subBindings);
+ } else {
+ return undefined;
+ }
+ }
+ return harden(bindings);
+ };
+
+ /**
+ * @param {Passable[]} bindings
+ * @param {Pattern} patt
+ * @param {Decompress} decompress
+ * @returns {Passable[]}
+ */
+ const arrayDecompressMatchPattern = (bindings, patt, decompress) => {
+ if (isKind(patt, 'match:any')) {
+ return bindings;
+ }
+ return harden(bindings.map(subBindings => decompress(subBindings, patt)));
+ };
+
// /////////////////////// Match Helpers /////////////////////////////////////
/** @type {MatchHelper} */
@@ -671,11 +732,32 @@ const makePatternKit = () => {
return patts.every(patt => checkMatches(specimen, patt, check));
},
+ // Compress only according to the last conjunct
+ compress: (specimen, patts, compress) => {
+ const { length } = patts;
+ // We know there are at least two patts
+ const lastPatt = patts[length - 1];
+ const allButLast = patts.slice(0, length - 1);
+ if (
+ !allButLast.every(patt => checkMatches(specimen, patt, identChecker))
+ ) {
+ return undefined;
+ }
+ return compress(specimen, lastPatt);
+ },
+
+ decompress: (bindings, patts, decompress) => {
+ const lastPatt = patts[patts.length - 1];
+ return decompress(bindings, lastPatt);
+ },
+
checkIsWellFormed: (allegedPatts, check) => {
const checkIt = patt => checkPattern(patt, check);
return (
(passStyleOf(allegedPatts) === 'copyArray' ||
check(false, X`Needs array of sub-patterns: ${allegedPatts}`)) &&
+ (allegedPatts.length >= 2 ||
+ check(false, X`Must have at least two sub-patterns`)) &&
allegedPatts.every(checkIt)
);
},
@@ -690,13 +772,6 @@ const makePatternKit = () => {
/** @type {MatchHelper} */
const matchOrHelper = Far('match:or helper', {
checkMatches: (specimen, patts, check) => {
- const { length } = patts;
- if (length === 0) {
- return check(
- false,
- X`${specimen} - no pattern disjuncts to match: ${patts}`,
- );
- }
if (
patts.length === 2 &&
!matches(specimen, patts[0]) &&
@@ -713,6 +788,26 @@ const makePatternKit = () => {
return check(false, X`${specimen} - Must match one of ${patts}`);
},
+ // Compress to a bindings array that starts with the index of the
+ // first disjunct that succeeded, followed by the bindings according to
+ // that disjunct.
+ compress: (specimen, patts, compress) => {
+ const { length } = patts;
+ if (length === 0) {
+ return undefined;
+ }
+ for (let i = 0; i < length; i += 1) {
+ const subBindings = compress(specimen, patts[i]);
+ if (subBindings !== undefined) {
+ return harden([i, ...subBindings]);
+ }
+ }
+ return undefined;
+ },
+
+ decompress: ([i, ...subBindings], patts, decompress) =>
+ decompress(harden(subBindings), patts[i]),
+
checkIsWellFormed: matchAndHelper.checkIsWellFormed,
getRankCover: (patts, encodePassable) =>
@@ -909,7 +1004,7 @@ const makePatternKit = () => {
const matchRemotableHelper = Far('match:remotable helper', {
checkMatches: (specimen, remotableDesc, check) => {
// Unfortunate duplication of checkKind logic, but no better choices.
- if (checkKind(specimen, 'remotable', identChecker)) {
+ if (isKind(specimen, 'remotable')) {
return true;
}
if (check === identChecker) {
@@ -1071,6 +1166,21 @@ const makePatternKit = () => {
);
},
+ // Compress to an array of corresponding bindings arrays
+ compress: (specimen, [subPatt, limits = undefined], compress) => {
+ const { arrayLengthLimit } = limit(limits);
+ if (
+ isKind(specimen, 'copyArray') &&
+ specimen.length <= arrayLengthLimit
+ ) {
+ return arrayCompressMatchPattern(specimen, subPatt, compress);
+ }
+ return undefined;
+ },
+
+ decompress: (bindings, [subPatt, _limits = undefined], decompress) =>
+ arrayDecompressMatchPattern(bindings, subPatt, decompress),
+
checkIsWellFormed: (payload, check) =>
checkIsWellFormedWithLimit(
payload,
@@ -1098,6 +1208,21 @@ const makePatternKit = () => {
);
},
+ // Compress to an array of corresponding bindings arrays
+ compress: (specimen, [keyPatt, limits = undefined], compress) => {
+ const { numSetElementsLimit } = limit(limits);
+ if (
+ isKind(specimen, 'copySet') &&
+ specimen.payload.length <= numSetElementsLimit
+ ) {
+ return arrayCompressMatchPattern(specimen.payload, keyPatt, compress);
+ }
+ return undefined;
+ },
+
+ decompress: (bindings, [keyPatt, _limits = undefined], decompress) =>
+ makeCopySet(arrayDecompressMatchPattern(bindings, keyPatt, decompress)),
+
checkIsWellFormed: (payload, check) =>
checkIsWellFormedWithLimit(
payload,
@@ -1139,6 +1264,42 @@ const makePatternKit = () => {
);
},
+ // Compress to an array of corresponding bindings arrays
+ compress: (
+ specimen,
+ [keyPatt, countPatt, limits = undefined],
+ compress,
+ ) => {
+ const { numUniqueBagElementsLimit, decimalDigitsLimit } = limit(limits);
+ if (
+ isKind(specimen, 'copyBag') &&
+ specimen.payload.length <= numUniqueBagElementsLimit &&
+ specimen.payload.every(([_key, count]) =>
+ checkDecimalDigitsLimit(count, decimalDigitsLimit, identChecker),
+ )
+ ) {
+ return arrayCompressMatchPattern(
+ specimen.payload,
+ harden([keyPatt, countPatt]),
+ compress,
+ );
+ }
+ return undefined;
+ },
+
+ decompress: (
+ bindings,
+ [keyPatt, countPatt, _limits = undefined],
+ decompress,
+ ) =>
+ makeCopyBag(
+ arrayDecompressMatchPattern(
+ bindings,
+ harden([keyPatt, countPatt]),
+ decompress,
+ ),
+ ),
+
checkIsWellFormed: (payload, check) =>
checkIsWellFormedWithLimit(
payload,
@@ -1182,6 +1343,48 @@ const makePatternKit = () => {
);
},
+ // Compress to a pair of bindings arrays, one for the keys
+ // and a matching one for the values.
+ compress: (
+ specimen,
+ [keyPatt, valuePatt, limits = undefined],
+ compress,
+ ) => {
+ const { numMapEntriesLimit } = limit(limits);
+ if (
+ isKind(specimen, 'copyMap') &&
+ specimen.payload.keys.length <= numMapEntriesLimit
+ ) {
+ return harden([
+ arrayCompressMatchPattern(specimen.payload.keys, keyPatt, compress),
+ arrayCompressMatchPattern(
+ specimen.payload.values,
+ valuePatt,
+ compress,
+ ),
+ ]);
+ }
+ return undefined;
+ },
+
+ decompress: (
+ [keyBindings, valueBindings],
+ [keyPatt, valuePatt, _limits = undefined],
+ decompress,
+ ) => {
+ return makeTagged(
+ 'copyMap',
+ harden({
+ keys: arrayDecompressMatchPattern(keyBindings, keyPatt, decompress),
+ values: arrayDecompressMatchPattern(
+ valueBindings,
+ valuePatt,
+ decompress,
+ ),
+ }),
+ );
+ },
+
checkIsWellFormed: (payload, check) =>
checkIsWellFormedWithLimit(
payload,
@@ -1264,6 +1467,47 @@ const makePatternKit = () => {
);
},
+ compress: (
+ specimen,
+ [requiredPatt, optionalPatt = [], restPatt = MM.any()],
+ compress,
+ ) => {
+ if (!checkKind(specimen, 'copyArray', identChecker)) {
+ return undefined;
+ }
+ const { requiredSpecimen, optionalSpecimen, restSpecimen } =
+ splitArrayParts(specimen, requiredPatt, optionalPatt);
+ const partialPatt = adaptArrayPattern(
+ optionalPatt,
+ optionalSpecimen.length,
+ );
+ const bindings = harden([
+ compress(requiredSpecimen, requiredPatt),
+ compress(optionalSpecimen, partialPatt),
+ compress(restSpecimen, restPatt),
+ ]);
+ if (bindings.some(subBinding => subBinding === undefined)) {
+ return undefined;
+ }
+ return bindings;
+ },
+
+ decompress: (
+ [requiredBindings, partialBindings, restBindings],
+ [requiredPatt, optionalPatt = [], restPatt = MM.any()],
+ decompress,
+ ) => {
+ const partialPatt = adaptArrayPattern(
+ optionalPatt,
+ partialBindings.length,
+ );
+ return [
+ ...decompress(requiredBindings, requiredPatt),
+ ...decompress(partialBindings, partialPatt),
+ ...decompress(restBindings, restPatt),
+ ];
+ },
+
/**
* @param {Array} splitArray
* @param {Checker} check
@@ -1376,6 +1620,53 @@ const makePatternKit = () => {
);
},
+ compress: (
+ specimen,
+ [requiredPatt, optionalPatt = {}, restPatt = MM.any()],
+ compress,
+ ) => {
+ if (!checkKind(specimen, 'copyRecord', identChecker)) {
+ return undefined;
+ }
+ const { requiredSpecimen, optionalSpecimen, restSpecimen } =
+ splitRecordParts(specimen, requiredPatt, optionalPatt);
+ const partialNames = /** @type {string[]} */ (ownKeys(optionalSpecimen));
+ const partialPatt = adaptRecordPattern(optionalPatt, partialNames);
+ const bindings = harden([
+ compress(requiredSpecimen, requiredPatt),
+ // The bindings must record which optional field names were
+ // present
+ objectMap(partialPatt, (fieldPatt, fieldName) =>
+ compress(optionalSpecimen[fieldName], fieldPatt),
+ ),
+ compress(restSpecimen, restPatt),
+ ]);
+ if (bindings.some(subBinding => subBinding === undefined)) {
+ return undefined;
+ }
+ return bindings;
+ },
+
+ decompress: (
+ [requiredBindings, partialBindings, restBindings],
+ [requiredPatt, optionalPatt = {}, restPatt = MM.any()],
+ decompress,
+ ) => {
+ const partialNames = /** @type {string[]} */ (ownKeys(partialBindings));
+ const partialPatt = adaptRecordPattern(optionalPatt, partialNames);
+
+ const allEntries = [
+ ...entries(decompress(requiredBindings, requiredPatt)),
+ ...entries(
+ objectMap(partialPatt, (fieldPatt, fieldName) =>
+ decompress(partialBindings[fieldName], fieldPatt),
+ ),
+ ),
+ ...entries(decompress(restBindings, restPatt)),
+ ];
+ return fromUniqueEntries(allEntries);
+ },
+
/**
* @param {Array} splitArray
* @param {Checker} check
@@ -1567,8 +1858,20 @@ const makePatternKit = () => {
/** @type {MatcherNamespace} */
const M = harden({
any: () => AnyShape,
- and: (...patts) => makeMatcher('match:and', patts),
- or: (...patts) => makeMatcher('match:or', patts),
+ and: (...patts) =>
+ // eslint-disable-next-line no-nested-ternary
+ patts.length === 0
+ ? M.any()
+ : patts.length === 1
+ ? patts[0]
+ : makeMatcher('match:and', patts),
+ or: (...patts) =>
+ // eslint-disable-next-line no-nested-ternary
+ patts.length === 0
+ ? M.not(M.any())
+ : patts.length === 1
+ ? patts[0]
+ : makeMatcher('match:or', patts),
not: subPatt => makeMatcher('match:not', subPatt),
scalar: () => ScalarShape,
@@ -1675,6 +1978,8 @@ const makePatternKit = () => {
assertPattern,
isPattern,
getRankCover,
+ kindOf,
+ maybeMatchHelper,
M,
});
};
@@ -1692,6 +1997,8 @@ export const {
assertPattern,
isPattern,
getRankCover,
+ kindOf,
+ maybeMatchHelper,
M,
} = makePatternKit();
diff --git a/packages/store/src/types.js b/packages/store/src/types.js
index 1c559b21ccd..66589b1f5ab 100644
--- a/packages/store/src/types.js
+++ b/packages/store/src/types.js
@@ -6,6 +6,7 @@
/** @template T @typedef {import('@endo/marshal').CopyRecord} CopyRecord */
/** @template T @typedef {import('@endo/marshal').CopyArray} CopyArray */
/** @typedef {import('@endo/marshal').Checker} Checker */
+/** @typedef {import('./patterns/patternMatchers').Kind} Kind */
/**
* @typedef {Passable} Key
@@ -781,11 +782,35 @@
* @property {(patt: Pattern) => void} assertPattern
* @property {(patt: Passable) => boolean} isPattern
* @property {GetRankCover} getRankCover
+ * @property {(passable: Passable, check?: Checker) => (Kind | undefined)} kindOf
+ * @property {(tag: string) => (MatchHelper | undefined)} maybeMatchHelper
* @property {MatcherNamespace} M
*/
// /////////////////////////////////////////////////////////////////////////////
+/**
+ * @callback Compress
+ * @param {Passable} specimen
+ * @param {Pattern} pattern
+ * @returns {Passable[] | undefined}
+ */
+
+/**
+ * @callback MustCompress
+ * @param {Passable} specimen
+ * @param {Pattern} pattern
+ * @param {string|number} [label]
+ * @returns {Passable[]}
+ */
+
+/**
+ * @callback Decompress
+ * @param {Passable[]} bindings
+ * @param {Pattern} pattern
+ * @returns {Passable}
+ */
+
// TODO
// The following type should be in internal-types.js, since the
// `MatchHelper` type is purely internal to this package. However,
@@ -812,6 +837,27 @@
* Assuming validity of `matcherPayload` as the payload of a Matcher corresponding
* with this MatchHelper, reports whether `specimen` is matched by that Matcher.
*
+ * @property {(specimen: Passable,
+ * matcherPayload: Passable,
+ * compress: Compress
+ * ) => (Passable[] | undefined)} [compress]
+ * Assuming a valid Matcher of this type with `matcherPayload` as its
+ * payload, if this specimen matches this matcher, then return a
+ * "bindings" array of passables that represents this specimen,
+ * perhaps more compactly, given the knowledge that it matches this matcher.
+ * If the specimen does not match the matcher, return undefined.
+ * If this matcher has a `compress` method, then it must have a matching
+ * `decompress` method.
+ *
+ * @property {(bindings: Passable[],
+ * matcherPayload: Passable,
+ * decompress: Decompress
+ * ) => Passable} [decompress]
+ * If `bindings` is the result of a successful `compress` with this matcher,
+ * then `decompress` must return a Passable equivalent to the original specimen.
+ * If this matcher has a `decompress` method, then it must have a matching
+ * `compress` method.
+ *
* @property {(
* payload: Passable,
* encodePassable: KeyToDBKey
diff --git a/packages/store/test/test-compress.js b/packages/store/test/test-compress.js
new file mode 100644
index 00000000000..20bef674ef6
--- /dev/null
+++ b/packages/store/test/test-compress.js
@@ -0,0 +1,166 @@
+// @ts-check
+
+import { test } from '@agoric/swingset-vat/tools/prepare-test-env-ava.js';
+import { Far } from '@endo/marshal';
+import {
+ makeCopyBagFromElements,
+ makeCopyMap,
+ makeCopySet,
+} from '../src/keys/checkKey.js';
+import {
+ compress,
+ decompress,
+ mustCompress,
+} from '../src/patterns/compress.js';
+import { M } from '../src/patterns/patternMatchers.js';
+
+const runTests = testTriple => {
+ const brand = Far('simoleans', {});
+ const moolaBrand = Far('moola', {});
+ const timer = Far('timer', {});
+
+ testTriple({ brand, value: 37n }, { brand, value: M.bigint() }, [37n]);
+ testTriple(
+ { brand, value: 37n },
+ { brand: M.remotable(), value: M.bigint() },
+ [37n, brand],
+ );
+ testTriple(
+ { brand, value: 37n },
+ { brand: M.bigint(), value: M.bigint() },
+ undefined,
+ 'test mustCompress: brand: remotable "[Alleged: simoleans]" - Must be a bigint',
+ );
+ testTriple({ brand, value: 37n }, M.any(), [{ brand, value: 37n }]);
+ testTriple({ brand, value: 37n }, M.recordOf(M.string(), M.scalar()), [
+ { brand, value: 37n },
+ ]);
+ testTriple(
+ [{ foo: 'a' }, { foo: 'b' }, { foo: 'c' }],
+ M.arrayOf(harden({ foo: M.string() })),
+ [[['a'], ['b'], ['c']]],
+ );
+ testTriple(
+ makeCopySet([{ foo: 'a' }, { foo: 'b' }, { foo: 'c' }]),
+ M.setOf(harden({ foo: M.string() })),
+ [[['c'], ['b'], ['a']]],
+ );
+ testTriple(
+ makeCopyBagFromElements([{ foo: 'a' }, { foo: 'a' }, { foo: 'c' }]),
+ M.bagOf(harden({ foo: M.string() })),
+ [
+ [
+ ['c', 1n],
+ ['a', 2n],
+ ],
+ ],
+ );
+ testTriple(
+ makeCopyMap([
+ [{ foo: 'a' }, { bar: 1 }],
+ [{ foo: 'b' }, { bar: 2 }],
+ [{ foo: 'c' }, { bar: 3 }],
+ ]),
+ M.mapOf(harden({ foo: M.string() }), harden({ bar: M.number() })),
+ [
+ [
+ [['c'], ['b'], ['a']],
+ [[3], [2], [1]],
+ ],
+ ],
+ );
+ testTriple(
+ makeCopyMap([
+ [{ foo: 'c' }, { bar: 3 }],
+ [{ foo: 'b' }, { bar: 2 }],
+ [{ foo: 'a' }, { bar: 1 }],
+ ]),
+ // TODO Add a test case where the keys are in the same rankOrder but not
+ // the same order.
+ makeCopyMap([
+ [{ foo: 'c' }, M.any()],
+ // @ts-expect-error The array need not be generic
+ [{ foo: 'b' }, { bar: M.number() }],
+ [{ foo: 'a' }, { bar: 1 }],
+ ]),
+ [{ bar: 3 }, 2],
+ );
+ testTriple(
+ {
+ want: { Winnings: { brand: moolaBrand, value: ['x', 'y'] } },
+ give: { Bid: { brand, value: 37n } },
+ exit: { afterDeadline: { deadline: 11n, timer } },
+ },
+ {
+ want: { Winnings: { brand: moolaBrand, value: M.array() } },
+ give: { Bid: { brand, value: M.nat() } },
+ exit: { afterDeadline: { deadline: M.gte(10n), timer } },
+ },
+ [['x', 'y'], 37n, 11n],
+ );
+ testTriple(
+ 'orange',
+ M.or('red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet'),
+ [[1]],
+ );
+ testTriple(
+ { x: 3, y: 5 },
+ M.or(harden({ x: M.number(), y: M.number() }), M.bigint(), M.record()),
+ [[0, 5, 3]],
+ );
+ testTriple(
+ [5n],
+ M.or(harden({ x: M.number(), y: M.number() }), [M.bigint()], M.record()),
+ [[1, 5n]],
+ );
+ testTriple(
+ { x: 3, y: 5, z: 9 },
+ M.or(harden({ x: M.number(), y: M.number() }), M.bigint(), M.record()),
+ [[2, { x: 3, y: 5, z: 9 }]],
+ );
+ testTriple(
+ {
+ brand,
+ value: [{ bar: 2 }, { bar: 1 }],
+ },
+ {
+ brand,
+ value: M.arrayOf(M.and(M.key(), { bar: M.number() })),
+ },
+ [[[[2]], [[1]]]],
+ );
+};
+
+test('compression', t => {
+ const testCompress = (specimen, pattern, bindings) =>
+ t.deepEqual(compress(harden(specimen), harden(pattern)), harden(bindings));
+ runTests(testCompress);
+});
+
+test('test mustCompress', t => {
+ const testCompress = (specimen, pattern, bindings, message) => {
+ if (bindings === undefined) {
+ t.throws(
+ () =>
+ mustCompress(harden(specimen), harden(pattern), 'test mustCompress'),
+ { message },
+ );
+ } else {
+ t.deepEqual(
+ mustCompress(harden(specimen), harden(pattern), 'test mustCompress'),
+ harden(bindings),
+ );
+ }
+ };
+ runTests(testCompress);
+});
+
+test('decompression', t => {
+ const testDecompress = (specimen, pattern, bindings) =>
+ bindings === undefined ||
+ t.deepEqual(
+ decompress(harden(bindings), harden(pattern)),
+ harden(specimen),
+ );
+ runTests(testDecompress);
+});
diff --git a/packages/store/test/test-patterns.js b/packages/store/test/test-patterns.js
index e297e9cd06f..e9841d110ae 100644
--- a/packages/store/test/test-patterns.js
+++ b/packages/store/test/test-patterns.js
@@ -39,7 +39,7 @@ const runTests = (successCase, failCase) => {
failCase(specimen, M.gte('x'), '3 - Must be >= "x"');
failCase(specimen, M.and(3, 4), '3 - Must be: 4');
failCase(specimen, M.or(4, 4), '3 - Must match one of [4,4]');
- failCase(specimen, M.or(), '3 - no pattern disjuncts to match: []');
+ failCase(specimen, M.or(), '3 - Must fail negated pattern: "[match:any]"');
}
{
const specimen = [3, 4];
diff --git a/packages/swingset-liveslots/src/collectionManager.js b/packages/swingset-liveslots/src/collectionManager.js
index 493ed519fb1..d6b96704dd0 100644
--- a/packages/swingset-liveslots/src/collectionManager.js
+++ b/packages/swingset-liveslots/src/collectionManager.js
@@ -14,6 +14,8 @@ import {
M,
makeCopySet,
makeCopyMap,
+ mustCompress,
+ decompress,
} from '@agoric/store';
import { Far, passStyleOf } from '@endo/marshal';
import { makeBaseRef, parseVatSlot } from './parseVatSlots.js';
@@ -208,18 +210,17 @@ export function makeCollectionManager(
const invalidValueTypeMsg = `invalid value type for collection ${q(label)}`;
const serializeValue = value => {
- if (valueShape !== undefined) {
- mustMatch(value, valueShape, invalidValueTypeMsg);
+ if (valueShape === undefined) {
+ return serialize(value);
}
- return serialize(value);
+ return serialize(mustCompress(value, valueShape, invalidValueTypeMsg));
};
const unserializeValue = data => {
- const value = unserialize(data);
- if (valueShape !== undefined) {
- mustMatch(value, valueShape, invalidValueTypeMsg);
+ if (valueShape === undefined) {
+ return unserialize(data);
}
- return value;
+ return decompress(unserialize(data), valueShape);
};
function prefix(dbEntryKey) {
diff --git a/packages/swingset-liveslots/src/virtualObjectManager.js b/packages/swingset-liveslots/src/virtualObjectManager.js
index d086499b413..bed00fd09ae 100644
--- a/packages/swingset-liveslots/src/virtualObjectManager.js
+++ b/packages/swingset-liveslots/src/virtualObjectManager.js
@@ -2,7 +2,7 @@
import { assert, Fail, q } from '@agoric/assert';
import { objectMap } from '@agoric/internal';
-import { assertPattern, mustMatch } from '@agoric/store';
+import { assertPattern, decompress, mustCompress } from '@agoric/store';
import { defendPrototype, defendPrototypeKit } from '@agoric/store/tools.js';
import { Far, passStyleOf } from '@endo/marshal';
import { parseVatSlot, makeBaseRef } from './parseVatSlots.js';
@@ -151,22 +151,26 @@ export function makeCache(size, fetch, store) {
/**
* Create a new virtual object manager. There is one of these for each vat.
*
- * @param {*} syscall Vat's syscall object, used to access the vatstore operations.
- * @param {*} vrm Virtual reference manager, to handle reference counting and GC
- * of virtual references.
- * @param {() => number} allocateExportID Function to allocate the next object
- * export ID for the enclosing vat.
- * @param {(val: object) => string} _getSlotForVal A function that returns the
- * object ID (vref) for a given object, if any. their corresponding export
- * IDs
- * @param {*} registerValue Function to register a new slot+value in liveSlot's
- * various tables
- * @param {import('@endo/marshal').Serialize} serialize Serializer for this vat
- * @param {import('@endo/marshal').Unserialize} unserialize Unserializer for this vat
- * @param {number} cacheSize How many virtual objects this manager should cache
- * in memory.
- * @param {*} assertAcceptableSyscallCapdataSize Function to check for oversized
- * syscall params
+ * @param {*} syscall
+ * Vat's syscall object, used to access the vatstore operations.
+ * @param {*} vrm
+ * Virtual reference manager, to handle reference counting and GC
+ * of virtual references.
+ * @param {() => number} allocateExportID
+ * Function to allocate the next object export ID for the enclosing vat.
+ * @param {(val: object) => string} _getSlotForVal
+ * A function that returns the object ID (vref) for a given object, if any.
+ * their corresponding export IDs
+ * @param {*} registerValue
+ * Function to register a new slot+value in liveSlot's various tables
+ * @param {import('@endo/marshal').Serialize} serialize
+ * Serializer for this vat
+ * @param {import('@endo/marshal').Unserialize} unserialize
+ * Unserializer for this vat
+ * @param {number} cacheSize
+ * How many virtual objects this manager should cache in memory.
+ * @param {*} assertAcceptableSyscallCapdataSize
+ * Function to check for oversized syscall params
*
* @returns {object} a new virtual object manager.
*
@@ -608,26 +612,25 @@ export function makeVirtualObjectManager(
assertPattern(stateShape);
const serializeSlot = (slotState, prop) => {
- if (stateShape !== undefined) {
- hasOwn(stateShape, prop) ||
- Fail`State must only have fields described by stateShape: ${q(
- ownKeys(stateShape),
- )}`;
- mustMatch(slotState, stateShape[prop], prop);
+ if (stateShape === undefined) {
+ return serialize(slotState);
}
- return serialize(slotState);
+ hasOwn(stateShape, prop) ||
+ Fail`State must only have fields described by stateShape: ${q(
+ ownKeys(stateShape),
+ )}`;
+ return serialize(mustCompress(slotState, stateShape[prop], prop));
};
const unserializeSlot = (slotData, prop) => {
- const slotValue = unserialize(slotData);
- if (stateShape !== undefined) {
- hasOwn(stateShape, prop) ||
- Fail`State only has fields described by stateShape: ${q(
- ownKeys(stateShape),
- )}`;
- mustMatch(slotValue, stateShape[prop]);
+ if (stateShape === undefined) {
+ return unserialize(slotData);
}
- return slotValue;
+ hasOwn(stateShape, prop) ||
+ Fail`State only has fields described by stateShape: ${q(
+ ownKeys(stateShape),
+ )}`;
+ return decompress(unserialize(slotData), stateShape[prop]);
};
const getInnerSelf = state => {